var/home/core/zuul-output/logs/kubelet.log:
Dec 06 08:11:56 crc systemd[1]: Starting Kubernetes Kubelet... Dec 06 08:11:56 crc restorecon[4699]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 08:11:56 crc 
restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 06 08:11:56 crc 
restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc 
restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc 
restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 08:11:56 
crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 
08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:56 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 
08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 06 08:11:57 crc 
restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 
08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 
08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc 
restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:57 crc restorecon[4699]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 06 08:11:57 crc restorecon[4699]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 06 08:11:57 crc kubenswrapper[4763]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 06 08:11:57 crc kubenswrapper[4763]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 06 08:11:57 crc kubenswrapper[4763]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 06 08:11:57 crc kubenswrapper[4763]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 06 08:11:57 crc kubenswrapper[4763]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 06 08:11:57 crc kubenswrapper[4763]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.574937 4763 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577813 4763 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577834 4763 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577838 4763 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577843 4763 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577848 4763 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577853 4763 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577856 4763 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577860 4763 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577864 4763 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577867 4763 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577871 4763 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577875 4763 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577879 4763 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577883 4763 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577888 4763 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577917 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577922 4763 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577926 4763 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577931 4763 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577934 4763 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577938 4763 feature_gate.go:330] 
unrecognized feature gate: InsightsOnDemandDataGather Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577941 4763 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577946 4763 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577951 4763 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577954 4763 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577959 4763 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577964 4763 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577967 4763 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577971 4763 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577974 4763 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577978 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577981 4763 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577984 4763 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577988 4763 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577992 4763 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577996 4763 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.577999 4763 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578003 4763 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578007 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578011 4763 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578014 4763 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578017 4763 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578021 4763 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578024 4763 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578028 4763 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578031 4763 feature_gate.go:330] unrecognized feature gate: 
InsightsConfigAPI Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578035 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578038 4763 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578042 4763 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578045 4763 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578051 4763 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578055 4763 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578058 4763 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578061 4763 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578065 4763 feature_gate.go:330] unrecognized feature gate: Example Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578069 4763 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578072 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578075 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578079 4763 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578082 4763 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578085 4763 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578089 4763 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578092 4763 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578095 4763 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578098 4763 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578102 4763 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578106 4763 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578111 4763 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578115 4763 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578119 4763 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.578123 4763 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578327 4763 flags.go:64] FLAG: --address="0.0.0.0" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578374 4763 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578384 4763 flags.go:64] FLAG: --anonymous-auth="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578390 4763 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578395 4763 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578400 4763 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578406 4763 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578411 4763 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578415 4763 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578419 4763 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578423 4763 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578428 4763 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578433 4763 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578438 4763 flags.go:64] FLAG: --cgroup-root="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578442 4763 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578446 4763 flags.go:64] FLAG: --client-ca-file="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578450 4763 flags.go:64] FLAG: --cloud-config="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578454 4763 flags.go:64] FLAG: --cloud-provider="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578458 4763 flags.go:64] FLAG: --cluster-dns="[]" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578463 4763 flags.go:64] FLAG: --cluster-domain="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578467 4763 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578471 4763 flags.go:64] FLAG: --config-dir="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578475 4763 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578479 4763 flags.go:64] FLAG: --container-log-max-files="5" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578485 4763 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578488 4763 flags.go:64] FLAG: 
--container-runtime-endpoint="/var/run/crio/crio.sock" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578492 4763 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578497 4763 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578501 4763 flags.go:64] FLAG: --contention-profiling="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578505 4763 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578509 4763 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578513 4763 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578517 4763 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578522 4763 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578526 4763 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578530 4763 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578535 4763 flags.go:64] FLAG: --enable-load-reader="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578539 4763 flags.go:64] FLAG: --enable-server="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578543 4763 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578548 4763 flags.go:64] FLAG: --event-burst="100" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578552 4763 flags.go:64] FLAG: --event-qps="50" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578557 4763 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578561 4763 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578566 4763 flags.go:64] FLAG: --eviction-hard="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578571 4763 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578575 4763 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578579 4763 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578584 4763 flags.go:64] FLAG: --eviction-soft="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578588 4763 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578592 4763 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578596 4763 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578600 4763 flags.go:64] FLAG: --experimental-mounter-path="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578604 4763 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578608 4763 flags.go:64] FLAG: --fail-swap-on="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578612 4763 flags.go:64] FLAG: --feature-gates="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578617 4763 
flags.go:64] FLAG: --file-check-frequency="20s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578621 4763 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578625 4763 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578629 4763 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578633 4763 flags.go:64] FLAG: --healthz-port="10248" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578640 4763 flags.go:64] FLAG: --help="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578643 4763 flags.go:64] FLAG: --hostname-override="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578647 4763 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578651 4763 flags.go:64] FLAG: --http-check-frequency="20s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578655 4763 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578660 4763 flags.go:64] FLAG: --image-credential-provider-config="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578663 4763 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578667 4763 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578671 4763 flags.go:64] FLAG: --image-service-endpoint="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578675 4763 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578678 4763 flags.go:64] FLAG: --kube-api-burst="100" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578683 4763 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578687 4763 flags.go:64] FLAG: --kube-api-qps="50" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578691 4763 flags.go:64] FLAG: --kube-reserved="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578695 4763 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578699 4763 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578703 4763 flags.go:64] FLAG: --kubelet-cgroups="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578707 4763 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578711 4763 flags.go:64] FLAG: --lock-file="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578715 4763 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578719 4763 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578723 4763 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578729 4763 flags.go:64] FLAG: --log-json-split-stream="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578733 4763 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578737 4763 flags.go:64] FLAG: --log-text-split-stream="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578741 4763 flags.go:64] FLAG: 
--logging-format="text" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578745 4763 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578749 4763 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578752 4763 flags.go:64] FLAG: --manifest-url="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578756 4763 flags.go:64] FLAG: --manifest-url-header="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578762 4763 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578766 4763 flags.go:64] FLAG: --max-open-files="1000000" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578772 4763 flags.go:64] FLAG: --max-pods="110" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578776 4763 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578781 4763 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578785 4763 flags.go:64] FLAG: --memory-manager-policy="None" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578789 4763 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578793 4763 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578797 4763 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578801 4763 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578810 4763 flags.go:64] FLAG: --node-status-max-images="50" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578814 4763 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578818 4763 flags.go:64] FLAG: --oom-score-adj="-999" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578822 4763 flags.go:64] FLAG: --pod-cidr="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578826 4763 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578832 4763 flags.go:64] FLAG: --pod-manifest-path="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578836 4763 flags.go:64] FLAG: --pod-max-pids="-1" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578840 4763 flags.go:64] FLAG: --pods-per-core="0" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578846 4763 flags.go:64] FLAG: --port="10250" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578850 4763 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578854 4763 flags.go:64] FLAG: --provider-id="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578858 4763 flags.go:64] FLAG: --qos-reserved="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578862 4763 flags.go:64] FLAG: --read-only-port="10255" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578866 4763 flags.go:64] FLAG: --register-node="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578870 4763 flags.go:64] FLAG: 
--register-schedulable="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578874 4763 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578881 4763 flags.go:64] FLAG: --registry-burst="10" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578885 4763 flags.go:64] FLAG: --registry-qps="5" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578889 4763 flags.go:64] FLAG: --reserved-cpus="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578893 4763 flags.go:64] FLAG: --reserved-memory="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578913 4763 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578918 4763 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578921 4763 flags.go:64] FLAG: --rotate-certificates="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578925 4763 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578929 4763 flags.go:64] FLAG: --runonce="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578934 4763 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578938 4763 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578942 4763 flags.go:64] FLAG: --seccomp-default="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578945 4763 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578950 4763 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578954 4763 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578958 4763 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578963 4763 flags.go:64] FLAG: --storage-driver-password="root" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578966 4763 flags.go:64] FLAG: --storage-driver-secure="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578970 4763 flags.go:64] FLAG: --storage-driver-table="stats" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578974 4763 flags.go:64] FLAG: --storage-driver-user="root" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578978 4763 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578982 4763 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578986 4763 flags.go:64] FLAG: --system-cgroups="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578990 4763 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.578997 4763 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579002 4763 flags.go:64] FLAG: --tls-cert-file="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579005 4763 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579011 4763 flags.go:64] FLAG: --tls-min-version="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579015 4763 flags.go:64] 
FLAG: --tls-private-key-file="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579018 4763 flags.go:64] FLAG: --topology-manager-policy="none" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579022 4763 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579026 4763 flags.go:64] FLAG: --topology-manager-scope="container" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579030 4763 flags.go:64] FLAG: --v="2" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579036 4763 flags.go:64] FLAG: --version="false" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579041 4763 flags.go:64] FLAG: --vmodule="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579045 4763 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.579050 4763 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.580665 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.580716 4763 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.580967 4763 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.580985 4763 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.580991 4763 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.580999 4763 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581004 4763 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581009 4763 feature_gate.go:330] unrecognized feature gate: Example Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581014 4763 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581021 4763 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581026 4763 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581031 4763 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581035 4763 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581040 4763 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581044 4763 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581051 4763 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581114 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581120 4763 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581125 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581130 4763 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581135 4763 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581140 4763 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581145 4763 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581149 4763 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581153 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581158 4763 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581163 4763 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581168 4763 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581173 4763 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581178 4763 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581182 4763 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581187 4763 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581192 4763 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581197 4763 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581201 4763 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581205 4763 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581211 4763 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581217 4763 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581222 4763 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581226 4763 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581231 4763 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581236 4763 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581240 4763 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581245 4763 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581249 4763 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581253 4763 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581257 4763 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581261 4763 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581265 4763 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581269 4763 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581274 4763 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581279 4763 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581284 4763 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581288 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581292 4763 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581296 4763 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581301 4763 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581306 4763 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581309 4763 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581313 4763 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581316 4763 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581320 4763 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581324 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 06 08:11:57 crc 
kubenswrapper[4763]: W1206 08:11:57.581328 4763 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581332 4763 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581335 4763 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581340 4763 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581344 4763 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581350 4763 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581354 4763 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.581358 4763 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.581364 4763 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.589361 4763 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.589400 4763 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589469 4763 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589482 4763 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589486 4763 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589491 4763 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589495 4763 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589500 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589505 4763 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589510 4763 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589514 4763 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589519 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589523 4763 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589527 4763 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589530 4763 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589535 4763 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589538 4763 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589543 4763 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589547 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589551 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589555 4763 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589559 4763 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589563 4763 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589567 4763 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589571 4763 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589575 4763 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589579 4763 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589583 4763 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589587 4763 feature_gate.go:330] unrecognized feature 
gate: ImageStreamImportMode Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589591 4763 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589595 4763 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589599 4763 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589603 4763 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589607 4763 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589611 4763 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589616 4763 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589621 4763 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589624 4763 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589628 4763 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589632 4763 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589636 4763 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589640 4763 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589645 4763 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589649 4763 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589653 4763 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589658 4763 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589662 4763 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589667 4763 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589672 4763 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589676 4763 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589680 4763 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589685 4763 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589689 4763 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589694 4763 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. 
It will be removed in a future release. Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589699 4763 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589704 4763 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589709 4763 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589712 4763 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589716 4763 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589720 4763 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589724 4763 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589727 4763 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589731 4763 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589735 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589739 4763 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589742 4763 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589746 4763 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589750 4763 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589754 4763 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589758 4763 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589762 4763 feature_gate.go:330] unrecognized feature gate: Example Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589766 4763 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589770 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.589779 4763 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589917 4763 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589926 4763 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589933 
4763 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589938 4763 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589943 4763 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589947 4763 feature_gate.go:330] unrecognized feature gate: Example Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589951 4763 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589955 4763 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589959 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589963 4763 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589967 4763 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589971 4763 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589975 4763 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589978 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589982 4763 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589986 4763 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589990 4763 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.589995 4763 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590000 4763 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590006 4763 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590010 4763 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590014 4763 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590018 4763 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590022 4763 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590026 4763 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590030 4763 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590034 4763 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590038 4763 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590042 4763 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590045 4763 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590049 4763 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590054 4763 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590057 4763 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590063 4763 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590068 4763 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590072 4763 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590075 4763 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590079 4763 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590082 4763 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590086 4763 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590089 4763 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590093 4763 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590097 4763 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590100 4763 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590104 4763 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration 
Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590107 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590112 4763 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590116 4763 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590120 4763 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590125 4763 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590129 4763 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590133 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590137 4763 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590141 4763 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590145 4763 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590150 4763 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590153 4763 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590157 4763 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590160 4763 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590165 4763 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590169 4763 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590173 4763 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590177 4763 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590183 4763 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590188 4763 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590197 4763 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590204 4763 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590208 4763 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590213 4763 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590219 4763 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.590225 4763 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.590233 4763 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.590465 4763 server.go:940] "Client rotation is on, will bootstrap in background" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.592758 4763 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.592859 4763 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.593419 4763 server.go:997] "Starting client certificate rotation" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.593440 4763 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.593616 4763 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-17 05:32:21.91531727 +0000 UTC Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.593712 4763 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1005h20m24.321609308s for next certificate rotation Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.608892 4763 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.610822 4763 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.620286 4763 log.go:25] "Validated CRI v1 runtime API" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.632767 4763 log.go:25] "Validated CRI v1 image API" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.634057 4763 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.636551 4763 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-06-08-06-52-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.636583 4763 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}] Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.652224 4763 manager.go:217] Machine: {Timestamp:2025-12-06 08:11:57.650613416 +0000 UTC m=+0.226318474 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:259aca8b-65ad-43d0-8d85-5abeeb8d07d2 BootID:82a904c2-cbc6-428b-8701-f59e17ed8d49 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 
Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:ce:09:c1 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:ce:09:c1 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:96:54:ac Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:38:97:00 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:e1:b2:70 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:5a:c4:d2 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:b2:e6:37:42:63:1d Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:b6:c3:0d:ac:10:0d Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: 
DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.652474 4763 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.652611 4763 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.652955 4763 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.653312 4763 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.653351 4763 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.653589 4763 topology_manager.go:138] "Creating topology manager with none policy" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.653603 4763 container_manager_linux.go:303] "Creating device plugin manager" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.653826 4763 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.653866 4763 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 06 
08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.654287 4763 state_mem.go:36] "Initialized new in-memory state store" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.654411 4763 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.656531 4763 kubelet.go:418] "Attempting to sync node with API server" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.656561 4763 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.656600 4763 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.656619 4763 kubelet.go:324] "Adding apiserver pod source" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.656634 4763 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.659263 4763 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.659815 4763 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.18:6443: connect: connection refused Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.659935 4763 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.18:6443: connect: connection refused" logger="UnhandledError" Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.659819 4763 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.18:6443: connect: connection refused Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.659987 4763 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.659998 4763 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.18:6443: connect: connection refused" logger="UnhandledError" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.660864 4763 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661505 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661531 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661538 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661546 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661565 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661576 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661584 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661597 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661607 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661617 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661631 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661639 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.661845 4763 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.662479 4763 server.go:1280] "Started kubelet" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.662820 4763 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.663243 4763 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.663954 4763 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.664196 4763 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.18:6443: connect: connection refused Dec 06 08:11:57 crc systemd[1]: Started Kubernetes Kubelet. 
Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.667620 4763 server.go:460] "Adding debug handlers to kubelet server" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.671249 4763 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.671334 4763 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.671526 4763 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 11:35:38.302050043 +0000 UTC Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.671601 4763 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.671700 4763 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.671718 4763 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.671764 4763 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.672929 4763 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.672945 4763 factory.go:55] Registering systemd factory Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.672953 4763 factory.go:221] Registration of the systemd container factory successfully Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.673231 4763 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.18:6443: connect: connection refused Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.673333 4763 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.18:6443: connect: connection refused" logger="UnhandledError" Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.673429 4763 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" interval="200ms" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.674297 4763 factory.go:153] Registering CRI-O factory Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.674311 4763 factory.go:221] Registration of the crio container factory successfully Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.674335 4763 factory.go:103] Registering Raw factory Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.674395 4763 manager.go:1196] Started watching for new ooms in manager Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.674946 4763 manager.go:319] Starting recovery of all containers Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.675158 4763 event.go:368] "Unable to write event 
(may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.18:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e9213c9bc9e1b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-06 08:11:57.662428699 +0000 UTC m=+0.238133737,LastTimestamp:2025-12-06 08:11:57.662428699 +0000 UTC m=+0.238133737,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.678928 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.678992 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679007 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679021 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679032 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679043 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679056 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679067 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679081 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679092 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679104 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679114 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679130 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679140 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679151 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679181 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679190 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679200 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679208 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679218 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679228 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679240 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679249 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679257 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679267 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679277 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679289 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679298 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679310 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679320 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679329 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679338 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679348 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679356 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679364 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679373 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679381 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679390 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679398 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679406 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679414 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679423 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679432 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679441 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679451 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679460 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679470 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679479 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679494 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679504 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679514 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679523 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679536 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679546 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679555 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679564 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679574 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679582 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679592 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679603 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679614 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679623 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679633 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679642 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679694 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679703 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679714 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679723 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679733 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679743 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679754 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679763 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679773 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679783 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679791 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679801 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679811 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679822 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679835 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679844 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679854 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679863 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679873 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679881 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679891 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679921 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" 
volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679934 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679944 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679955 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679965 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679976 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679986 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.679997 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680007 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680018 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680029 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680038 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680049 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680060 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680069 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680081 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680092 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680104 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680114 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680131 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680142 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680153 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680164 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" 
volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680176 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680187 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680200 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680210 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680222 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680233 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680243 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680252 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680262 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680272 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680281 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680290 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680300 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680309 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680321 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680330 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680341 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680349 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680358 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680368 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680377 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680386 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680397 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680406 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680417 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680426 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680435 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680445 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680456 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680466 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680476 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680485 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680497 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680507 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680525 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680535 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680545 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680564 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680574 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680586 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680596 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680608 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680617 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680627 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680637 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680647 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680658 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680667 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680677 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680687 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680697 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680705 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680715 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680725 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680735 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680744 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680754 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680764 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680773 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680785 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680795 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680805 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680816 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680825 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680838 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680848 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680860 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680870 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680879 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680889 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680929 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680943 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680953 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680962 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680972 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680981 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.680991 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681001 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681010 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681025 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681035 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681044 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681054 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681063 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681071 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681613 4763 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681634 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681645 4763 reconstruct.go:130] "Volume is marked 
as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681686 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681696 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681706 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681715 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681725 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681734 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681742 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681751 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681762 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681778 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681788 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681797 4763 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681828 4763 reconstruct.go:97] "Volume reconstruction finished" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.681836 4763 reconciler.go:26] "Reconciler: start to sync state" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.698288 4763 manager.go:324] Recovery completed Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.710882 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.712920 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.712969 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.712985 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.713822 4763 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.713837 4763 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.713857 4763 state_mem.go:36] "Initialized new in-memory state store" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.716567 4763 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.718177 4763 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.718231 4763 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.718275 4763 kubelet.go:2335] "Starting kubelet main sync loop" Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.718475 4763 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 06 08:11:57 crc kubenswrapper[4763]: W1206 08:11:57.719131 4763 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.18:6443: connect: connection refused Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.719208 4763 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.18:6443: connect: connection refused" logger="UnhandledError" Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.772202 4763 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.807359 4763 policy_none.go:49] "None policy: Start" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.809201 4763 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.809240 4763 state_mem.go:35] "Initializing new in-memory state store" Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.819428 4763 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.869917 4763 manager.go:334] "Starting Device Plugin manager" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.869983 4763 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.869997 4763 server.go:79] "Starting device plugin registration server" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.870438 4763 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.870454 4763 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.870893 4763 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.871088 4763 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.871114 4763 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.873961 4763 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" interval="400ms" Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.880191 4763 eviction_manager.go:285] "Eviction 
manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.971421 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.973216 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.973354 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.973376 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:57 crc kubenswrapper[4763]: I1206 08:11:57.973430 4763 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 08:11:57 crc kubenswrapper[4763]: E1206 08:11:57.974222 4763 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.18:6443: connect: connection refused" node="crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.020072 4763 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.020183 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.021406 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.021432 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.021441 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.021553 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.021812 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.021941 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.022233 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.022261 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.022271 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.022404 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.022836 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.022916 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.023145 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.023165 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.023173 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.023253 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.023619 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.023648 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024352 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024381 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024398 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024411 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024444 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024449 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024477 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024454 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024489 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024705 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024788 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.024812 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.025117 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.025137 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.025146 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.025648 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.025670 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.025677 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.025701 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.025719 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.025730 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.025798 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.025817 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.026402 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.026434 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.026445 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087599 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087636 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087658 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087673 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087689 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087744 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087777 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087804 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087840 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087858 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087876 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087921 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087940 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.087957 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.088058 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.175113 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.176574 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.176606 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 
08:11:58.176615 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.176645 4763 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 08:11:58 crc kubenswrapper[4763]: E1206 08:11:58.177167 4763 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.18:6443: connect: connection refused" node="crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189380 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189413 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189435 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189456 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189474 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189492 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189508 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189524 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189542 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189558 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189576 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189594 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189613 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189630 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189647 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.189940 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190013 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190052 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") 
" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190118 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190063 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190149 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190175 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190155 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190206 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190223 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190233 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190213 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190323 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190457 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.190522 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: E1206 08:11:58.275294 4763 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" interval="800ms" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.401959 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.418064 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.426529 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: W1206 08:11:58.431443 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-3e627d43bc11acaa65a8529013ce5f48c434b00263f8a91d6606cb9287e51965 WatchSource:0}: Error finding container 3e627d43bc11acaa65a8529013ce5f48c434b00263f8a91d6606cb9287e51965: Status 404 returned error can't find the container with id 3e627d43bc11acaa65a8529013ce5f48c434b00263f8a91d6606cb9287e51965 Dec 06 08:11:58 crc kubenswrapper[4763]: W1206 08:11:58.434258 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-dc05083a86580132cfc60610360a7acf568668a8f5c5a498124b02c4dc5de030 WatchSource:0}: Error finding container dc05083a86580132cfc60610360a7acf568668a8f5c5a498124b02c4dc5de030: Status 404 returned error can't find the container with id dc05083a86580132cfc60610360a7acf568668a8f5c5a498124b02c4dc5de030 Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.437441 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.445575 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 08:11:58 crc kubenswrapper[4763]: W1206 08:11:58.447226 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-df7683ed2130cff33da8ec8cfeffd8741daae9a81307c5db3f94a7ab7bcddd19 WatchSource:0}: Error finding container df7683ed2130cff33da8ec8cfeffd8741daae9a81307c5db3f94a7ab7bcddd19: Status 404 returned error can't find the container with id df7683ed2130cff33da8ec8cfeffd8741daae9a81307c5db3f94a7ab7bcddd19 Dec 06 08:11:58 crc kubenswrapper[4763]: W1206 08:11:58.455407 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-ef3679380d141e8abcee0c972d1938e60f79fd1882c8a0c88038eba35b12c5b5 WatchSource:0}: Error finding container ef3679380d141e8abcee0c972d1938e60f79fd1882c8a0c88038eba35b12c5b5: Status 404 returned error can't find the container with id ef3679380d141e8abcee0c972d1938e60f79fd1882c8a0c88038eba35b12c5b5 Dec 06 08:11:58 crc kubenswrapper[4763]: W1206 08:11:58.474874 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-692d1dd710a65ada0e44a3ec9064960f073c4a7fc90f25c6b6780fb01d583fd4 WatchSource:0}: Error finding container 692d1dd710a65ada0e44a3ec9064960f073c4a7fc90f25c6b6780fb01d583fd4: Status 404 returned error can't find the container with id 692d1dd710a65ada0e44a3ec9064960f073c4a7fc90f25c6b6780fb01d583fd4 Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.578115 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.579299 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.579348 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.579359 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.579381 4763 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 08:11:58 crc kubenswrapper[4763]: E1206 08:11:58.579796 4763 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.18:6443: connect: connection refused" node="crc" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.665156 4763 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.18:6443: connect: connection refused Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.672171 4763 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 08:39:58.877078148 +0000 UTC Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.672216 4763 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 624h28m0.204864634s for next certificate rotation Dec 06 08:11:58 crc kubenswrapper[4763]: W1206 
08:11:58.713405 4763 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.18:6443: connect: connection refused Dec 06 08:11:58 crc kubenswrapper[4763]: E1206 08:11:58.713484 4763 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.18:6443: connect: connection refused" logger="UnhandledError" Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.722478 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"692d1dd710a65ada0e44a3ec9064960f073c4a7fc90f25c6b6780fb01d583fd4"} Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.723485 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ef3679380d141e8abcee0c972d1938e60f79fd1882c8a0c88038eba35b12c5b5"} Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.724386 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"df7683ed2130cff33da8ec8cfeffd8741daae9a81307c5db3f94a7ab7bcddd19"} Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.725276 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"dc05083a86580132cfc60610360a7acf568668a8f5c5a498124b02c4dc5de030"} Dec 06 08:11:58 crc kubenswrapper[4763]: I1206 08:11:58.726140 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3e627d43bc11acaa65a8529013ce5f48c434b00263f8a91d6606cb9287e51965"} Dec 06 08:11:58 crc kubenswrapper[4763]: W1206 08:11:58.813651 4763 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.18:6443: connect: connection refused Dec 06 08:11:58 crc kubenswrapper[4763]: E1206 08:11:58.814317 4763 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.18:6443: connect: connection refused" logger="UnhandledError" Dec 06 08:11:59 crc kubenswrapper[4763]: E1206 08:11:59.076608 4763 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" interval="1.6s" Dec 06 08:11:59 crc kubenswrapper[4763]: W1206 08:11:59.135389 4763 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.18:6443: connect: connection refused Dec 06 08:11:59 crc kubenswrapper[4763]: E1206 08:11:59.135560 4763 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.18:6443: connect: connection refused" logger="UnhandledError" Dec 06 08:11:59 crc kubenswrapper[4763]: W1206 08:11:59.212749 4763 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.18:6443: connect: connection refused Dec 06 08:11:59 crc kubenswrapper[4763]: E1206 08:11:59.212836 4763 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.18:6443: connect: connection refused" logger="UnhandledError" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.380516 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.381724 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.381756 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.381769 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.381795 4763 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 08:11:59 crc kubenswrapper[4763]: E1206 08:11:59.382216 4763 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.18:6443: connect: connection refused" node="crc" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.665878 4763 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.18:6443: connect: connection refused Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.731006 4763 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b880fdc6c6709fd94beb04c3dedc123b15b165ecbc1c1801c55dbe3e26cfc4b5" exitCode=0 Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.731145 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b880fdc6c6709fd94beb04c3dedc123b15b165ecbc1c1801c55dbe3e26cfc4b5"} Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.731188 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.732187 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 
08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.732270 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.732301 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.733119 4763 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="fad1d9036c220c3af22b957b3b594f478a5c6b4fa1097c249e7f3f2b843565ce" exitCode=0 Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.733214 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"fad1d9036c220c3af22b957b3b594f478a5c6b4fa1097c249e7f3f2b843565ce"} Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.733261 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.734450 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.734500 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.734511 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.735473 4763 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="09a9b5a533357b10fc7bfdf06e98102c8f7f8c648e0b1be21296aa8b92725610" exitCode=0 Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.735542 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"09a9b5a533357b10fc7bfdf06e98102c8f7f8c648e0b1be21296aa8b92725610"} Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.735606 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.736806 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.736857 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.736878 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.739239 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3879440a90dfdaac1409b79fae77e8365f47e327b7c6dfc1a08ace1b6890e455"} Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.739291 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2b9c99cb8d531d4a215cbc974376de5d5ea688573a02f12e8b176096ba1eb17e"} Dec 06 08:11:59 crc 
kubenswrapper[4763]: I1206 08:11:59.739316 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8c6ec754f10ad76b6a83cebf646b1a65a28e07f94917d4032ded6e3ee9b493f9"} Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.739318 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.739340 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d0529710afe7b975ccf39d5adda2da2e67315e1cfd5fa4e22464655bcb955093"} Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.740186 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.740221 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.740234 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.741341 4763 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6" exitCode=0 Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.741392 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6"} Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.741493 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.742693 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.742732 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.742750 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.745401 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.746279 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.746376 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:11:59 crc kubenswrapper[4763]: I1206 08:11:59.746398 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.747091 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690"} Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.747155 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8"} Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.747168 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241"} Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.747177 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838"} Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.747185 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493"} Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.747196 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.748075 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.748107 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.748117 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.750159 4763 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b6d0c5d20cabaa955be4564e9bea6cbb2e41ff9ed47d8b99a29c7a1ac0daf9f0" exitCode=0 Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.750222 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b6d0c5d20cabaa955be4564e9bea6cbb2e41ff9ed47d8b99a29c7a1ac0daf9f0"} Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.750298 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.751174 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.751212 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.751224 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.752863 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"d6bc7f2b1ff11b84c479b0f31fb323db94594e949bfeb5894371f9ae084a10a3"} Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.752928 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.753619 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.753645 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.753656 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.763544 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.763572 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.763527 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"38a7b077ba529137fe456c558c817b58b8f9625cd1aa771dcec7d3a19be6413e"} Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.763664 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0b3941e54f5f9307e059eeeafe2a9b2184450c407f88169c9485d514c4f38b2f"} Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.763676 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"65404dc77b36d640a46635f5e3ba3cc139ac757e34264cbd5846f59cba9c7523"} Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.764342 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.764378 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.764390 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.765001 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.765040 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.765056 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.983271 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.985171 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 
08:12:00.985220 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.985231 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:00 crc kubenswrapper[4763]: I1206 08:12:00.985261 4763 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.768940 4763 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="4fd7b928425964c9f6601950cec63f8ad84770ec5825e817ac8818d6cb5424f9" exitCode=0 Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.769056 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.769034 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"4fd7b928425964c9f6601950cec63f8ad84770ec5825e817ac8818d6cb5424f9"} Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.769084 4763 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.769131 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.769169 4763 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.769222 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.769164 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770079 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770116 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770127 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770316 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770389 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770403 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770425 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770465 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770477 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770713 4763 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770734 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:01 crc kubenswrapper[4763]: I1206 08:12:01.770747 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:02 crc kubenswrapper[4763]: I1206 08:12:02.775282 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d73b2c0cc0b0808867990a659566a10064ab5d403d965dc06f60ba41e9df425a"} Dec 06 08:12:02 crc kubenswrapper[4763]: I1206 08:12:02.775322 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"509178f7c871b36eeffd09888140c8c02041c3c6967bd93bdcce8efb86788b45"} Dec 06 08:12:02 crc kubenswrapper[4763]: I1206 08:12:02.775340 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e609a4a7aa75dd81184b09e28813014e6148106524f723bdfa1099eb0bb3231b"} Dec 06 08:12:02 crc kubenswrapper[4763]: I1206 08:12:02.775348 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a57ff6770bdc26f57dc5a01b8ee7f4096e5837e5eb5d71c5b1297d84f988be7e"} Dec 06 08:12:02 crc kubenswrapper[4763]: I1206 08:12:02.991272 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:12:02 crc kubenswrapper[4763]: I1206 08:12:02.991442 4763 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 06 08:12:02 crc kubenswrapper[4763]: I1206 08:12:02.991473 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:02 crc kubenswrapper[4763]: I1206 08:12:02.992627 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:02 crc kubenswrapper[4763]: I1206 08:12:02.992672 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:02 crc kubenswrapper[4763]: I1206 08:12:02.992688 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:03 crc kubenswrapper[4763]: I1206 08:12:03.781544 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2be27b49a9dded0975ead56b8ace97c28049f2dab5b3aee973be1ff2b0ec7f78"} Dec 06 08:12:03 crc kubenswrapper[4763]: I1206 08:12:03.781763 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:03 crc kubenswrapper[4763]: I1206 08:12:03.783131 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:03 crc kubenswrapper[4763]: I1206 08:12:03.783168 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:03 crc kubenswrapper[4763]: I1206 08:12:03.783179 4763 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:04 crc kubenswrapper[4763]: I1206 08:12:04.204119 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 06 08:12:04 crc kubenswrapper[4763]: I1206 08:12:04.571324 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:12:04 crc kubenswrapper[4763]: I1206 08:12:04.571504 4763 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 06 08:12:04 crc kubenswrapper[4763]: I1206 08:12:04.571546 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:04 crc kubenswrapper[4763]: I1206 08:12:04.572846 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:04 crc kubenswrapper[4763]: I1206 08:12:04.572916 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:04 crc kubenswrapper[4763]: I1206 08:12:04.572927 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:04 crc kubenswrapper[4763]: I1206 08:12:04.784091 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:04 crc kubenswrapper[4763]: I1206 08:12:04.785371 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:04 crc kubenswrapper[4763]: I1206 08:12:04.785408 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:04 crc kubenswrapper[4763]: I1206 08:12:04.785423 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:05 crc kubenswrapper[4763]: I1206 08:12:05.482143 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 08:12:05 crc kubenswrapper[4763]: I1206 08:12:05.482365 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:05 crc kubenswrapper[4763]: I1206 08:12:05.483419 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:05 crc kubenswrapper[4763]: I1206 08:12:05.483451 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:05 crc kubenswrapper[4763]: I1206 08:12:05.483460 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:05 crc kubenswrapper[4763]: I1206 08:12:05.786501 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:05 crc kubenswrapper[4763]: I1206 08:12:05.787735 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:05 crc kubenswrapper[4763]: I1206 08:12:05.787797 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:05 crc kubenswrapper[4763]: I1206 08:12:05.787821 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:06 crc kubenswrapper[4763]: I1206 08:12:06.938519 4763 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:12:06 crc kubenswrapper[4763]: I1206 08:12:06.938731 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:06 crc kubenswrapper[4763]: I1206 08:12:06.939966 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:06 crc kubenswrapper[4763]: I1206 08:12:06.940015 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:06 crc kubenswrapper[4763]: I1206 08:12:06.940030 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:07 crc kubenswrapper[4763]: I1206 08:12:07.229829 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:12:07 crc kubenswrapper[4763]: I1206 08:12:07.764932 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:12:07 crc kubenswrapper[4763]: I1206 08:12:07.765082 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:07 crc kubenswrapper[4763]: I1206 08:12:07.766057 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:07 crc kubenswrapper[4763]: I1206 08:12:07.766130 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:07 crc kubenswrapper[4763]: I1206 08:12:07.766151 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:07 crc kubenswrapper[4763]: I1206 08:12:07.809139 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:07 crc kubenswrapper[4763]: I1206 08:12:07.810005 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:07 crc kubenswrapper[4763]: I1206 08:12:07.810039 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:07 crc kubenswrapper[4763]: I1206 08:12:07.810048 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:07 crc kubenswrapper[4763]: E1206 08:12:07.880476 4763 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.008542 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.013795 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.122635 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.390582 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 06 
08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.390837 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.392156 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.392213 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.392232 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.811629 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.812322 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.812364 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.812376 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:08 crc kubenswrapper[4763]: I1206 08:12:08.816024 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:12:09 crc kubenswrapper[4763]: I1206 08:12:09.816016 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:09 crc kubenswrapper[4763]: I1206 08:12:09.817569 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:09 crc kubenswrapper[4763]: I1206 08:12:09.817621 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:09 crc kubenswrapper[4763]: I1206 08:12:09.817633 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:10 crc kubenswrapper[4763]: W1206 08:12:10.480634 4763 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 06 08:12:10 crc kubenswrapper[4763]: I1206 08:12:10.480830 4763 trace.go:236] Trace[1637318967]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Dec-2025 08:12:00.478) (total time: 10001ms): Dec 06 08:12:10 crc kubenswrapper[4763]: Trace[1637318967]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (08:12:10.480) Dec 06 08:12:10 crc kubenswrapper[4763]: Trace[1637318967]: [10.001867389s] [10.001867389s] END Dec 06 08:12:10 crc kubenswrapper[4763]: E1206 08:12:10.480868 4763 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 06 08:12:10 crc 
kubenswrapper[4763]: I1206 08:12:10.666071 4763 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 06 08:12:10 crc kubenswrapper[4763]: E1206 08:12:10.677442 4763 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 06 08:12:10 crc kubenswrapper[4763]: W1206 08:12:10.728016 4763 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 06 08:12:10 crc kubenswrapper[4763]: I1206 08:12:10.728113 4763 trace.go:236] Trace[180304421]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Dec-2025 08:12:00.726) (total time: 10001ms): Dec 06 08:12:10 crc kubenswrapper[4763]: Trace[180304421]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (08:12:10.728) Dec 06 08:12:10 crc kubenswrapper[4763]: Trace[180304421]: [10.001594984s] [10.001594984s] END Dec 06 08:12:10 crc kubenswrapper[4763]: E1206 08:12:10.728134 4763 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 06 08:12:10 crc kubenswrapper[4763]: I1206 08:12:10.817377 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:10 crc kubenswrapper[4763]: I1206 08:12:10.818218 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:10 crc kubenswrapper[4763]: I1206 08:12:10.818272 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:10 crc kubenswrapper[4763]: I1206 08:12:10.818281 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:10 crc kubenswrapper[4763]: E1206 08:12:10.986490 4763 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 06 08:12:11 crc kubenswrapper[4763]: I1206 08:12:11.123465 4763 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 06 08:12:11 crc kubenswrapper[4763]: I1206 08:12:11.123566 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline 
exceeded (Client.Timeout exceeded while awaiting headers)" Dec 06 08:12:11 crc kubenswrapper[4763]: I1206 08:12:11.435519 4763 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 06 08:12:11 crc kubenswrapper[4763]: I1206 08:12:11.435582 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 06 08:12:11 crc kubenswrapper[4763]: I1206 08:12:11.440551 4763 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 06 08:12:11 crc kubenswrapper[4763]: I1206 08:12:11.440602 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 06 08:12:12 crc kubenswrapper[4763]: I1206 08:12:12.996379 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:12:12 crc kubenswrapper[4763]: I1206 08:12:12.996544 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:12 crc kubenswrapper[4763]: I1206 08:12:12.997552 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:12 crc kubenswrapper[4763]: I1206 08:12:12.997611 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:12 crc kubenswrapper[4763]: I1206 08:12:12.997625 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:13 crc kubenswrapper[4763]: I1206 08:12:13.000395 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:12:13 crc kubenswrapper[4763]: I1206 08:12:13.824240 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:13 crc kubenswrapper[4763]: I1206 08:12:13.825081 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:13 crc kubenswrapper[4763]: I1206 08:12:13.825116 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:13 crc kubenswrapper[4763]: I1206 08:12:13.825126 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:14 crc kubenswrapper[4763]: I1206 08:12:14.187524 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:14 crc kubenswrapper[4763]: 
I1206 08:12:14.189243 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:14 crc kubenswrapper[4763]: I1206 08:12:14.189274 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:14 crc kubenswrapper[4763]: I1206 08:12:14.189282 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:14 crc kubenswrapper[4763]: I1206 08:12:14.189302 4763 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 08:12:14 crc kubenswrapper[4763]: E1206 08:12:14.194707 4763 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 06 08:12:14 crc kubenswrapper[4763]: I1206 08:12:14.780553 4763 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 06 08:12:15 crc kubenswrapper[4763]: I1206 08:12:15.755845 4763 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.425505 4763 trace.go:236] Trace[131369839]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Dec-2025 08:12:01.479) (total time: 14945ms): Dec 06 08:12:16 crc kubenswrapper[4763]: Trace[131369839]: ---"Objects listed" error: 14945ms (08:12:16.425) Dec 06 08:12:16 crc kubenswrapper[4763]: Trace[131369839]: [14.94575691s] [14.94575691s] END Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.425555 4763 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.425879 4763 trace.go:236] Trace[222597649]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Dec-2025 08:12:02.407) (total time: 14018ms): Dec 06 08:12:16 crc kubenswrapper[4763]: Trace[222597649]: ---"Objects listed" error: 14018ms (08:12:16.425) Dec 06 08:12:16 crc kubenswrapper[4763]: Trace[222597649]: [14.018254437s] [14.018254437s] END Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.425927 4763 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.426766 4763 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.453416 4763 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:54926->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.453499 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:54926->192.168.126.11:17697: read: connection reset by peer" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.453953 4763 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe 
status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.454037 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.809091 4763 apiserver.go:52] "Watching apiserver" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.811204 4763 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.811398 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.811682 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.811768 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.811860 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.811886 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.811996 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.812181 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.812218 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.812188 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.812389 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.813867 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.814014 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.814038 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.814079 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.813811 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.814295 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.814676 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.816130 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.816136 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.832140 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.836023 4763 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690" exitCode=255 Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.836279 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690"} Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.839015 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.845470 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.845657 4763 scope.go:117] "RemoveContainer" containerID="27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.851836 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.862396 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4413feb6-e0d5-46c0-9f03-8b07886f1cc8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06
T08:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 08:12:11.154082 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 08:12:11.155300 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3935535561/tls.crt::/tmp/serving-cert-3935535561/tls.key\\\\\\\"\\\\nI1206 08:12:16.436504 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 08:12:16.438703 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 08:12:16.438720 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 08:12:16.438741 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 08:12:16.438745 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 08:12:16.443141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1206 08:12:16.443168 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443173 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443186 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nI1206 08:12:16.443183 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 08:12:16.443190 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 08:12:16.443211 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 08:12:16.443216 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 08:12:16.445945 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.872800 4763 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.874702 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.884012 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.893234 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.901655 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.910392 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.921030 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929166 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929200 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929214 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929258 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929287 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929317 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929342 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 06 
08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929366 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929391 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929413 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929436 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929456 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929498 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929548 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929573 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929575 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929605 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929636 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929660 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929687 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929711 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929732 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929760 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929784 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929808 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929804 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod 
"6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929831 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929874 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929934 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929957 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929958 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.929977 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930000 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930023 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930134 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930153 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930168 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930183 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930232 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930247 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930267 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930286 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930302 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930321 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930337 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930353 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930370 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930395 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930415 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930431 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930448 4763 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930464 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930717 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930736 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930753 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930776 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930801 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930818 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930833 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930851 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930867 4763 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930882 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930924 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930949 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930990 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931015 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931039 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931059 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931079 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931101 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 
06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931123 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931145 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931165 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931183 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931206 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931230 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931256 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931280 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931305 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931446 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 
08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931499 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931528 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931555 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931582 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931608 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931634 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931662 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931688 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931713 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931738 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 06 08:12:16 crc 
kubenswrapper[4763]: I1206 08:12:16.931766 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931791 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931826 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931849 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931872 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931895 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931932 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931955 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931980 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932010 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: 
\"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932035 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932075 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932103 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930392 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930643 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.930821 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931219 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931233 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932372 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931211 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931282 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931366 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931414 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932452 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931448 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931713 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931747 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931766 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931828 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.931980 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932004 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932060 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932026 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932095 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932122 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932418 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932565 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.932671 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:17.432647785 +0000 UTC m=+20.008352823 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932683 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932692 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932698 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932952 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932961 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932990 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.933071 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.933271 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.933297 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.933321 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.933623 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.933681 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.933780 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.933800 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.933953 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934159 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934280 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934284 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934343 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934446 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934466 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934625 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934790 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.932130 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934915 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934928 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934952 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934950 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934970 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.934990 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935025 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935057 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935059 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935157 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935176 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935194 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935211 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935228 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935244 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935329 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935525 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935661 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935722 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.935838 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936062 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936087 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936189 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936189 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936220 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936223 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936243 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936352 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936349 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936373 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936491 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936504 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936577 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936665 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936761 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936782 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936859 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.937685 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.937839 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938106 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.936239 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938389 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938418 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938486 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938509 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938524 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938539 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938574 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938591 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938605 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: 
\"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938626 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938643 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938660 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938675 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938690 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938705 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938720 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938735 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938776 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938794 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: 
\"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938810 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938828 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938843 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938857 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938872 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938889 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938931 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938949 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938966 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.938981 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939013 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939029 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939054 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939070 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939094 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939110 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939127 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939142 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939159 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939177 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939253 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939270 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939604 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939815 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939830 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939844 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939888 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.939989 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940029 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940077 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940099 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940115 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940130 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940145 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940160 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940175 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940339 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940380 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940445 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940467 4763 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940493 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940508 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940523 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940538 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940552 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940600 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940711 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940862 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.941110 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940940 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.940766 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.941092 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.941283 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.941368 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.941389 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.941507 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.941594 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.941773 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.941913 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.942090 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.942285 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.942365 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.942435 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.942506 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943216 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943319 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943641 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.944417 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.944509 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.944582 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.944655 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.945510 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946043 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946140 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 
08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946162 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946180 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946242 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946265 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946282 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946301 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946318 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946336 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946355 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946372 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946389 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946406 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946700 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946717 4763 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946727 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946739 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946751 4763 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946763 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946776 4763 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946788 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: 
\"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946801 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946812 4763 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946850 4763 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946860 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946885 4763 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946930 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946944 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946953 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946963 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946972 4763 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946986 4763 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948106 4763 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948129 4763 reconciler_common.go:293] 
"Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948144 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948155 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948165 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948176 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948186 4763 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948197 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948211 4763 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948221 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948231 4763 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948241 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948253 4763 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.942146 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948273 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.942180 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.942451 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.942488 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.942892 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943215 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943279 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943546 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943550 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943594 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943726 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943768 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943832 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.944201 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.944245 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.944472 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.944571 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.944790 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.944813 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.944916 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.945124 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.943225 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.945448 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.945612 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.945612 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.945989 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946264 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946305 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946729 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946752 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946776 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946842 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.946765 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.947003 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.947079 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.947128 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.947193 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.947209 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.947358 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.947556 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.947758 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.947762 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948468 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.947853 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.947987 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948439 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948605 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948621 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948671 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948729 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948830 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.949018 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.949037 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.949125 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.949320 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.949684 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.949707 4763 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.949785 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:17.449763037 +0000 UTC m=+20.025468175 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.949826 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.949856 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.950068 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.950097 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.950144 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.950440 4763 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.950512 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:17.450498418 +0000 UTC m=+20.026203576 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.950596 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.950811 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.950846 4763 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951045 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951118 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951174 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951180 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.948264 4763 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951295 4763 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951310 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951321 4763 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951332 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951342 4763 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951352 4763 reconciler_common.go:293] "Volume detached for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951368 4763 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.950710 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951379 4763 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953072 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953095 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953127 4763 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953153 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953166 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953179 4763 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953190 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953186 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953206 4763 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953217 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953231 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953246 4763 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953547 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953585 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953600 4763 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953619 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953633 4763 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953649 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953661 4763 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953689 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.951393 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod 
"a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.952440 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.952562 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.952977 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953521 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953745 4763 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953790 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953808 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953820 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953837 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953848 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953858 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953868 4763 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953929 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953943 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953967 4763 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953978 4763 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953994 4763 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.954041 4763 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.954052 4763 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.954066 4763 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953773 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953801 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.953962 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.955060 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.954267 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.954314 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). 
InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.955409 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.955589 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.955736 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956016 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.954075 4763 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956424 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956437 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956453 4763 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956476 4763 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956485 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956494 4763 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956527 4763 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956536 4763 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956546 4763 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956558 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.956568 4763 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.957065 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: 
"catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.961209 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.961401 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.962796 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.965512 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.967066 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.961047 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.968407 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.968771 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.969835 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.970727 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.970753 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.970768 4763 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.970835 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:17.470815846 +0000 UTC m=+20.046520984 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.972250 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.972272 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.972284 4763 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:16 crc kubenswrapper[4763]: E1206 08:12:16.972337 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:17.472320826 +0000 UTC m=+20.048025864 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.972878 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.972971 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.977645 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.979165 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.980588 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.981774 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.981707 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.982338 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.982755 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.982798 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.982877 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.983252 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.983745 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.983769 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.983785 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.984605 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:16 crc kubenswrapper[4763]: I1206 08:12:16.987693 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.000932 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.008964 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057103 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057349 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057463 4763 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057525 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057583 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057640 4763 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057691 4763 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057745 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057795 4763 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057850 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057925 4763 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057981 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: 
\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.057991 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058034 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058044 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058054 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058064 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058072 4763 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058079 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058088 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058096 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058104 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058112 4763 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058121 4763 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058129 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: 
\"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058136 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058144 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058152 4763 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058161 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058169 4763 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058177 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058185 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058222 4763 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058231 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058238 4763 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058246 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058254 4763 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058264 4763 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058272 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058282 4763 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058290 4763 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058298 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058305 4763 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058313 4763 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058328 4763 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058339 4763 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058350 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058358 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058366 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058373 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058382 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: 
\"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058390 4763 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058398 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058406 4763 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058414 4763 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058422 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058430 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058438 4763 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058446 4763 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058454 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058461 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058469 4763 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058476 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058484 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058491 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058498 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058505 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058513 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058522 4763 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058529 4763 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058536 4763 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058545 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058553 4763 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058561 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058569 4763 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058576 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058584 4763 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath 
\"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058592 4763 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058600 4763 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058608 4763 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058615 4763 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058623 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058631 4763 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058638 4763 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058646 4763 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058653 4763 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058661 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058675 4763 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058683 4763 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058691 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058698 4763 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058706 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058714 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058722 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058729 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058737 4763 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058745 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058753 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058772 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058780 4763 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058787 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058795 4763 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058803 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058811 
4763 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058818 4763 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058826 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058834 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058843 4763 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.058851 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.059207 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.132227 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.139598 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.141408 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 06 08:12:17 crc kubenswrapper[4763]: W1206 08:12:17.142181 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-a11d7e9caede99c100ab7de064bd72583d4ca812d4a0ea418c53943d35e523f9 WatchSource:0}: Error finding container a11d7e9caede99c100ab7de064bd72583d4ca812d4a0ea418c53943d35e523f9: Status 404 returned error can't find the container with id a11d7e9caede99c100ab7de064bd72583d4ca812d4a0ea418c53943d35e523f9 Dec 06 08:12:17 crc kubenswrapper[4763]: W1206 08:12:17.152783 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-130de1ffb1b23445c7e762cc99ae31e6151675384dbde3520a5e933ee34abdb1 WatchSource:0}: Error finding container 130de1ffb1b23445c7e762cc99ae31e6151675384dbde3520a5e933ee34abdb1: Status 404 returned error can't find the container with id 130de1ffb1b23445c7e762cc99ae31e6151675384dbde3520a5e933ee34abdb1 Dec 06 08:12:17 crc kubenswrapper[4763]: W1206 08:12:17.157221 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-f65911b2d7d66e7b0638bcf48a033fe77e9f127de2dcc6d7a25891d9bf6afe46 WatchSource:0}: Error finding container f65911b2d7d66e7b0638bcf48a033fe77e9f127de2dcc6d7a25891d9bf6afe46: Status 404 returned error can't find the container with id f65911b2d7d66e7b0638bcf48a033fe77e9f127de2dcc6d7a25891d9bf6afe46 Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.461224 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.461284 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.461323 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.461413 4763 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.461424 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-06 08:12:18.461397506 +0000 UTC m=+21.037102544 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.461459 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:18.461446387 +0000 UTC m=+21.037151425 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.461519 4763 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.461553 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:18.46154674 +0000 UTC m=+21.037251778 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.562573 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.562624 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.562744 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.562765 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.562780 4763 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.562749 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.562842 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.562854 4763 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.562828 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:18.562812675 +0000 UTC m=+21.138517713 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:17 crc kubenswrapper[4763]: E1206 08:12:17.562891 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:18.562882017 +0000 UTC m=+21.138587055 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.722786 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.723835 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.725169 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.725875 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.727093 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.727764 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.728378 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.729398 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.729654 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.730069 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.731131 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.731697 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.733041 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.733484 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 
08:12:17.733988 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.734943 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.735413 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.736395 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.736924 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.737537 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.738712 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.738800 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.739572 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.740815 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.741463 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.747854 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.748335 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.749249 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.749642 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.750475 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.752309 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.753040 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.754039 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.754647 4763 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.754771 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.756650 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.758065 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.759624 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.759724 4763 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.761755 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.762869 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.763411 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.765241 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.765963 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.767051 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.767664 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.768611 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.769458 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.769935 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4413feb6-e0d5-46c0-9f03-8b07886f1cc8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06
T08:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 08:12:11.154082 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 08:12:11.155300 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3935535561/tls.crt::/tmp/serving-cert-3935535561/tls.key\\\\\\\"\\\\nI1206 08:12:16.436504 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 08:12:16.438703 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 08:12:16.438720 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 08:12:16.438741 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 08:12:16.438745 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 08:12:16.443141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1206 08:12:16.443168 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443173 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443186 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nI1206 08:12:16.443183 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 08:12:16.443190 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 08:12:16.443211 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 08:12:16.443216 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 08:12:16.445945 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.771139 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.772071 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.773091 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.773770 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.774673 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.775222 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.776010 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.776506 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.777046 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.778224 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.778803 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.788740 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.840610 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.842006 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e"} Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.842155 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.842853 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"130de1ffb1b23445c7e762cc99ae31e6151675384dbde3520a5e933ee34abdb1"} Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.844309 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"2296ffd7b42cca5f2dbaec404d33795e05e5fcc07583fa0c921337569f64b17d"} Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.844334 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5a784bfd7ddaf490f76d1c6550e5d3136efcbf58ae2ec2453416aa4affdb65a5"} Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.844347 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f65911b2d7d66e7b0638bcf48a033fe77e9f127de2dcc6d7a25891d9bf6afe46"} Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.845793 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"29e4c90261b669ef9620a5110059be62ded1d5bc99e2d9fb03d1f7022c0f2df2"} Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.845834 4763 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"a11d7e9caede99c100ab7de064bd72583d4ca812d4a0ea418c53943d35e523f9"} Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.854130 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4413feb6-e0d5-46c0-9f03-8b07886f1cc8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 08:12:11.154082 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 08:12:11.155300 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3935535561/tls.crt::/tmp/serving-cert-3935535561/tls.key\\\\\\\"\\\\nI1206 08:12:16.436504 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 08:12:16.438703 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 08:12:16.438720 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 08:12:16.438741 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 08:12:16.438745 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 08:12:16.443141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1206 08:12:16.443168 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443173 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443186 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nI1206 08:12:16.443183 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 08:12:16.443190 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 08:12:16.443211 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 08:12:16.443216 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 08:12:16.445945 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.862182 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.872440 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.882117 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.892007 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.899368 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.906723 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.916927 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2296ffd7b42cca5f2dbaec404d33795e05e5fcc07583fa0c921337569f64b17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a784bfd7ddaf490f76d1c6550e5d3136efcbf58ae2ec2453416aa4affdb65a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\
\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.931124 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4413feb6-e0d5-46c0-9f03-8b07886f1cc8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 08:12:11.154082 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 08:12:11.155300 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3935535561/tls.crt::/tmp/serving-cert-3935535561/tls.key\\\\\\\"\\\\nI1206 08:12:16.436504 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 08:12:16.438703 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 08:12:16.438720 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 08:12:16.438741 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 08:12:16.438745 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 08:12:16.443141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1206 08:12:16.443168 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443173 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443186 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nI1206 08:12:16.443183 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 08:12:16.443190 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 08:12:16.443211 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 
08:12:16.443216 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 08:12:16.445945 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:17Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.943580 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29e4c90261b669ef9620a5110059be62ded1d5bc99e2d9fb03d1f7022c0f2df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:17Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.954228 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:17Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.966965 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:17Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.978649 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:17Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:17 crc kubenswrapper[4763]: I1206 08:12:17.992105 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:17Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.126860 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.130383 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.133727 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.138383 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.149324 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.159862 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.171236 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4413feb6-e0d5-46c0-9f03-8b07886f1cc8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 08:12:11.154082 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 08:12:11.155300 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3935535561/tls.crt::/tmp/serving-cert-3935535561/tls.key\\\\\\\"\\\\nI1206 08:12:16.436504 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 08:12:16.438703 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 08:12:16.438720 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 08:12:16.438741 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 08:12:16.438745 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 08:12:16.443141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1206 08:12:16.443168 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443173 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443186 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nI1206 08:12:16.443183 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 08:12:16.443190 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 08:12:16.443211 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 08:12:16.443216 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 08:12:16.445945 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.184069 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29e4c90261b669ef9620a5110059be62ded1d5bc99e2d9fb03d1f7022c0f2df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.193753 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.203480 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2296ffd7b42cca5f2dbaec404d33795e05e5fcc07583fa0c921337569f64b17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a784bfd7ddaf490f76d1c6550e5d3136efcbf58ae2ec2453416aa4affdb65a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.213935 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2296ffd7b42cca5f2dbaec404d33795e05e5fcc07583fa0c921337569f64b17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a784bfd7ddaf490f76d1c6550e5d3136efcbf58ae2ec2453416aa4affdb65a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.229142 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2637cd9b-a739-4f6a-ba71-86239190a19a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c6ec754f10ad76b6a83cebf646b1a65a28e07f94917d4032ded6e3ee9b493f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0529710afe7b975ccf39d5adda2da2e67315e1cfd5fa4e22464655bcb955093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9c99cb8d531d4a215cbc974376de5d5ea688573a02f12e8b176096ba1eb17e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf9
2edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3879440a90dfdaac1409b79fae77e8365f47e327b7c6dfc1a08ace1b6890e455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.255817 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29e4c90261b669ef9620a5110059be62ded1d5bc99e2d9fb03d1f7022c0f2df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.279478 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.291391 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.302615 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.314823 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4413feb6-e0d5-46c0-9f03-8b07886f1cc8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 08:12:11.154082 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 08:12:11.155300 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3935535561/tls.crt::/tmp/serving-cert-3935535561/tls.key\\\\\\\"\\\\nI1206 08:12:16.436504 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 08:12:16.438703 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 08:12:16.438720 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 08:12:16.438741 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 08:12:16.438745 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 08:12:16.443141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1206 08:12:16.443168 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443173 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443186 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nI1206 08:12:16.443183 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 08:12:16.443190 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 08:12:16.443211 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 08:12:16.443216 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 08:12:16.445945 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.325489 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.459156 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.471875 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.478056 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could 
not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.478137 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.478257 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.478283 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.478300 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:20.47828169 +0000 UTC m=+23.053986728 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.478345 4763 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.478395 4763 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.478397 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:20.478384263 +0000 UTC m=+23.054089301 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.478480 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:20.478470865 +0000 UTC m=+23.054175893 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.490830 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.503390 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4413feb6-e0d5-46c0-9f03-8b07886f1cc8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 08:12:11.154082 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 08:12:11.155300 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3935535561/tls.crt::/tmp/serving-cert-3935535561/tls.key\\\\\\\"\\\\nI1206 08:12:16.436504 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 08:12:16.438703 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 08:12:16.438720 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 08:12:16.438741 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 08:12:16.438745 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 08:12:16.443141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1206 08:12:16.443168 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443173 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443186 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nI1206 08:12:16.443183 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 08:12:16.443190 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 08:12:16.443211 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 08:12:16.443216 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 08:12:16.445945 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.517344 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2637cd9b-a739-4f6a-ba71-86239190a19a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c6ec754f10ad76b6a83cebf646b1a65a28e07f94917d4032ded6e3ee9b493f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0529710afe7b975ccf39d5adda2da2e67315e1cfd5fa4e22464655bcb955093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9c99cb8d531d4a215cbc974376de5d5ea688573a02f12e8b176096ba1eb17e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3879440a90dfdaac1409b79fae77e8365f47e327b7c6dfc1a08ace1b6890e455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.531690 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29e4c90261b669ef9620a5110059be62ded1d5bc99e2d9fb03d1f7022c0f2df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.544123 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.544360 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.554849 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.565504 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2296ffd7b42cca5f2dbaec404d33795e05e5fcc07583fa0c921337569f64b17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a784bfd7ddaf490f76d1c6550e5d3136efcbf58ae2ec2453416aa4affdb65a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.579602 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:18 crc 
kubenswrapper[4763]: I1206 08:12:18.579652 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.579757 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.579782 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.579795 4763 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.579756 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.579862 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.579870 4763 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.579844 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:20.579828542 +0000 UTC m=+23.155533580 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.579921 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:20.579912525 +0000 UTC m=+23.155617563 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.585984 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"357f276a-1d7b-4f39-a8ca-454f2905dd68\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e609a4a7aa75dd81184b09e28813014e6148106524f723bdfa1099eb0bb3231b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://509178f7c871b36eeffd09888140c8c02041c3c6967bd93bdcce8efb86788b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b2c0cc0b0808867990a659566a10064ab5d403d965dc06f60ba41e9df425a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2be27b49a9dded0975ead56b8ace97c28049f2dab5b3aee973be1ff2b0ec7f78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a57ff6770bdc26f57dc5a01b8ee7f4096e5837e5eb5d71c5b1297d84f988be7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b880fdc6c6709fd94beb04c3dedc123b15b165ecbc1c1801c55dbe3e26cfc4b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b880fdc6c6709fd94beb04c3dedc123b15b165ecbc1c1801c55dbe3e26cfc4b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d0c5d20cabaa955be4564e9bea6cbb2e41ff9ed47d8b99a29c7a1ac0daf9f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d0c5d20cabaa955be4564e9bea6cbb2e41ff9ed47d8b99a29c7a1ac0daf9f0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://4fd7b928425964c9f6601950cec63f8ad84770ec5825e817ac8818d6cb5424f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fd7b928425964c9f6601950cec63f8ad84770ec5825e817ac8818d6cb5424f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T08:12:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.596732 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4413feb6-e0d5-46c0-9f03-8b07886f1cc8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1206 08:12:11.154082 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1206 08:12:11.155300 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3935535561/tls.crt::/tmp/serving-cert-3935535561/tls.key\\\\\\\"\\\\nI1206 08:12:16.436504 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1206 08:12:16.438703 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1206 08:12:16.438720 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1206 08:12:16.438741 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1206 08:12:16.438745 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1206 08:12:16.443141 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1206 08:12:16.443168 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443173 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1206 08:12:16.443186 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nI1206 08:12:16.443183 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1206 08:12:16.443190 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1206 08:12:16.443211 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1206 08:12:16.443216 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1206 08:12:16.445945 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.606440 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2637cd9b-a739-4f6a-ba71-86239190a19a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-06T08:11:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c6ec754f10ad76b6a83cebf646b1a65a28e07f94917d4032ded6e3ee9b493f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0529710afe7b975ccf39d5adda2da2e67315e1cfd5fa4e22464655bcb955093\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9c99cb8d531d4a215cbc974376de5d5ea688573a02f12e8b176096ba1eb17e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3879440a90dfdaac1409b79fae77e8365f47e327b7c6dfc1a08ace1b6890e455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:11:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-06T08:11:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.617957 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29e4c90261b669ef9620a5110059be62ded1d5bc99e2d9fb03d1f7022c0f2df2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.634323 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.645006 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.678556 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.696778 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.713148 4763 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-06T08:12:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2296ffd7b42cca5f2dbaec404d33795e05e5fcc07583fa0c921337569f64b17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a784bfd7ddaf490f76d1c6550e5d3136efcbf58ae2ec2453416aa4affdb65a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-06T08:12:17Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-06T08:12:18Z is after 2025-08-24T17:21:41Z" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.719292 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.719319 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:18 crc kubenswrapper[4763]: I1206 08:12:18.719382 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.719408 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.719470 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.719522 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 08:12:18 crc kubenswrapper[4763]: E1206 08:12:18.854024 4763 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.493637 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.493686 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.493730 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.493796 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:24.493771333 +0000 UTC m=+27.069476371 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.493836 4763 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.493876 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:24.493863987 +0000 UTC m=+27.069569025 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.493944 4763 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.494054 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:24.494028941 +0000 UTC m=+27.069734059 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.594227 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.594320 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.594805 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.594824 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.594883 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.594916 4763 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.594830 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.594966 
4763 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.594919 4763 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.594997 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:24.594979447 +0000 UTC m=+27.170684475 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.595054 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:24.595023139 +0000 UTC m=+27.170728187 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.597137 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.597174 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.597183 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.597222 4763 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.603749 4763 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.604001 4763 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.605061 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.605113 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.605128 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 
08:12:20.605146 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.605161 4763 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-06T08:12:20Z","lastTransitionTime":"2025-12-06T08:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.719018 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.719018 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.719156 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.719309 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.719399 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 08:12:20 crc kubenswrapper[4763]: E1206 08:12:20.719480 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.853930 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"8ac75cf1f7b4f30ed3c2aafb67e776b32dea2f36108c69eaa8ab7a2c248951d6"} Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.897097 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=4.897077086 podStartE2EDuration="4.897077086s" podCreationTimestamp="2025-12-06 08:12:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:20.885509014 +0000 UTC m=+23.461214072" watchObservedRunningTime="2025-12-06 08:12:20.897077086 +0000 UTC m=+23.472782124" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.897366 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-dhqz6"] Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.897672 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-dhqz6" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.898983 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.899260 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.899347 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.910573 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=2.9105556 podStartE2EDuration="2.9105556s" podCreationTimestamp="2025-12-06 08:12:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:20.900865688 +0000 UTC m=+23.476570726" watchObservedRunningTime="2025-12-06 08:12:20.9105556 +0000 UTC m=+23.486260638" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.910712 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-wq76d"] Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.910983 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-wq76d" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.914613 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.914639 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.914634 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.914876 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.986313 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=2.9862964659999998 podStartE2EDuration="2.986296466s" podCreationTimestamp="2025-12-06 08:12:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:20.983813178 +0000 UTC m=+23.559518216" watchObservedRunningTime="2025-12-06 08:12:20.986296466 +0000 UTC m=+23.562001504" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.989147 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-vm2pv"] Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.989495 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.991583 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.991857 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.992026 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.992373 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.992622 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998289 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-multus-cni-dir\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998330 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-os-release\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998353 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: 
\"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-multus-socket-dir-parent\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998374 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f30d9eee-4b49-4a1c-8bff-280b619fd3fc-hosts-file\") pod \"node-resolver-dhqz6\" (UID: \"f30d9eee-4b49-4a1c-8bff-280b619fd3fc\") " pod="openshift-dns/node-resolver-dhqz6" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998406 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-hostroot\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998494 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-var-lib-kubelet\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998611 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-system-cni-dir\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998644 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-var-lib-cni-bin\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998664 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-multus-conf-dir\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998685 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-run-multus-certs\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998718 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8dc562e3-8b35-4486-8731-dc26218daf86-cni-binary-copy\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998741 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/8dc562e3-8b35-4486-8731-dc26218daf86-multus-daemon-config\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998760 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl947\" (UniqueName: \"kubernetes.io/projected/8dc562e3-8b35-4486-8731-dc26218daf86-kube-api-access-bl947\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.998781 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjkdq\" (UniqueName: \"kubernetes.io/projected/11b09917-f546-487b-b78a-9b66af1ab00f-kube-api-access-qjkdq\") pod \"node-ca-wq76d\" (UID: \"11b09917-f546-487b-b78a-9b66af1ab00f\") " pod="openshift-image-registry/node-ca-wq76d" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.999061 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-run-netns\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.999098 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/11b09917-f546-487b-b78a-9b66af1ab00f-host\") pod \"node-ca-wq76d\" (UID: \"11b09917-f546-487b-b78a-9b66af1ab00f\") " pod="openshift-image-registry/node-ca-wq76d" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.999135 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-cnibin\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.999157 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-run-k8s-cni-cncf-io\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.999185 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-var-lib-cni-multus\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.999204 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-etc-kubernetes\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.999224 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d67f8\" (UniqueName: 
\"kubernetes.io/projected/f30d9eee-4b49-4a1c-8bff-280b619fd3fc-kube-api-access-d67f8\") pod \"node-resolver-dhqz6\" (UID: \"f30d9eee-4b49-4a1c-8bff-280b619fd3fc\") " pod="openshift-dns/node-resolver-dhqz6" Dec 06 08:12:20 crc kubenswrapper[4763]: I1206 08:12:20.999243 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/11b09917-f546-487b-b78a-9b66af1ab00f-serviceca\") pod \"node-ca-wq76d\" (UID: \"11b09917-f546-487b-b78a-9b66af1ab00f\") " pod="openshift-image-registry/node-ca-wq76d" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.000629 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-859jt"] Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.001368 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.002825 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-np59r"] Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.003183 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.003789 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.003902 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.004048 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg"] Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.004375 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.004580 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.005211 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.005743 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.006227 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.006424 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.006565 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.006757 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.006927 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.007002 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.065044 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-p2rk6"] Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.065451 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:21 crc kubenswrapper[4763]: E1206 08:12:21.065512 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-p2rk6" podUID="dcd65fd5-43dc-42a9-84d9-e37bb8e220af" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.099885 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-hostroot\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.099952 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-var-lib-kubelet\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.099974 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86wnp\" (UniqueName: \"kubernetes.io/projected/75bb7009-deac-407d-901d-035c51914a8a-kube-api-access-86wnp\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.099979 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-hostroot\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.099990 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-system-cni-dir\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100051 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-var-lib-cni-bin\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100057 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-var-lib-kubelet\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100075 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/75bb7009-deac-407d-901d-035c51914a8a-mcd-auth-proxy-config\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100108 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-system-cni-dir\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc 
kubenswrapper[4763]: I1206 08:12:21.100101 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-tuning-conf-dir\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100145 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/75bb7009-deac-407d-901d-035c51914a8a-rootfs\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100163 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs\") pod \"network-metrics-daemon-p2rk6\" (UID: \"dcd65fd5-43dc-42a9-84d9-e37bb8e220af\") " pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100110 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-var-lib-cni-bin\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100182 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8dc562e3-8b35-4486-8731-dc26218daf86-cni-binary-copy\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100267 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8dc562e3-8b35-4486-8731-dc26218daf86-multus-daemon-config\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100306 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100331 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100369 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-cnibin\") pod \"multus-vm2pv\" (UID: 
\"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100397 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-run-k8s-cni-cncf-io\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100439 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-run-k8s-cni-cncf-io\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100428 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-cnibin\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100442 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-etc-kubernetes\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100474 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-etc-kubernetes\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100489 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d67f8\" (UniqueName: \"kubernetes.io/projected/f30d9eee-4b49-4a1c-8bff-280b619fd3fc-kube-api-access-d67f8\") pod \"node-resolver-dhqz6\" (UID: \"f30d9eee-4b49-4a1c-8bff-280b619fd3fc\") " pod="openshift-dns/node-resolver-dhqz6" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100509 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/cf614e8e-1f39-41c7-8325-b57f028af887-cni-binary-copy\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100565 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-os-release\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100582 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-multus-socket-dir-parent\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100599 4763 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-multus-cni-dir\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100620 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f30d9eee-4b49-4a1c-8bff-280b619fd3fc-hosts-file\") pod \"node-resolver-dhqz6\" (UID: \"f30d9eee-4b49-4a1c-8bff-280b619fd3fc\") " pod="openshift-dns/node-resolver-dhqz6" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100637 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100655 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/cf614e8e-1f39-41c7-8325-b57f028af887-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100664 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-os-release\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100671 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzm5w\" (UniqueName: \"kubernetes.io/projected/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-kube-api-access-xzm5w\") pod \"network-metrics-daemon-p2rk6\" (UID: \"dcd65fd5-43dc-42a9-84d9-e37bb8e220af\") " pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100717 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f30d9eee-4b49-4a1c-8bff-280b619fd3fc-hosts-file\") pod \"node-resolver-dhqz6\" (UID: \"f30d9eee-4b49-4a1c-8bff-280b619fd3fc\") " pod="openshift-dns/node-resolver-dhqz6" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100727 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-multus-conf-dir\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100754 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-multus-conf-dir\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100778 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" 
(UniqueName: \"kubernetes.io/configmap/8dc562e3-8b35-4486-8731-dc26218daf86-cni-binary-copy\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100799 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-multus-cni-dir\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100801 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-run-multus-certs\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100853 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100865 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-run-multus-certs\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100800 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8dc562e3-8b35-4486-8731-dc26218daf86-multus-daemon-config\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100889 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-os-release\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100948 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100971 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-system-cni-dir\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.100994 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-cnibin\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101024 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl947\" (UniqueName: \"kubernetes.io/projected/8dc562e3-8b35-4486-8731-dc26218daf86-kube-api-access-bl947\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101046 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjkdq\" (UniqueName: \"kubernetes.io/projected/11b09917-f546-487b-b78a-9b66af1ab00f-kube-api-access-qjkdq\") pod \"node-ca-wq76d\" (UID: \"11b09917-f546-487b-b78a-9b66af1ab00f\") " pod="openshift-image-registry/node-ca-wq76d" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101070 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-run-netns\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101095 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/11b09917-f546-487b-b78a-9b66af1ab00f-host\") pod \"node-ca-wq76d\" (UID: \"11b09917-f546-487b-b78a-9b66af1ab00f\") " pod="openshift-image-registry/node-ca-wq76d" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101120 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t5vx\" (UniqueName: \"kubernetes.io/projected/cf614e8e-1f39-41c7-8325-b57f028af887-kube-api-access-7t5vx\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101123 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-run-netns\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101187 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/11b09917-f546-487b-b78a-9b66af1ab00f-host\") pod \"node-ca-wq76d\" (UID: \"11b09917-f546-487b-b78a-9b66af1ab00f\") " pod="openshift-image-registry/node-ca-wq76d" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101204 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-var-lib-cni-multus\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101230 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/11b09917-f546-487b-b78a-9b66af1ab00f-serviceca\") pod \"node-ca-wq76d\" (UID: 
\"11b09917-f546-487b-b78a-9b66af1ab00f\") " pod="openshift-image-registry/node-ca-wq76d" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101252 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-host-var-lib-cni-multus\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101257 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/75bb7009-deac-407d-901d-035c51914a8a-proxy-tls\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.101572 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8dc562e3-8b35-4486-8731-dc26218daf86-multus-socket-dir-parent\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.107258 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/11b09917-f546-487b-b78a-9b66af1ab00f-serviceca\") pod \"node-ca-wq76d\" (UID: \"11b09917-f546-487b-b78a-9b66af1ab00f\") " pod="openshift-image-registry/node-ca-wq76d" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.131325 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5lcfn"] Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.132127 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.135522 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl947\" (UniqueName: \"kubernetes.io/projected/8dc562e3-8b35-4486-8731-dc26218daf86-kube-api-access-bl947\") pod \"multus-vm2pv\" (UID: \"8dc562e3-8b35-4486-8731-dc26218daf86\") " pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.138240 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d67f8\" (UniqueName: \"kubernetes.io/projected/f30d9eee-4b49-4a1c-8bff-280b619fd3fc-kube-api-access-d67f8\") pod \"node-resolver-dhqz6\" (UID: \"f30d9eee-4b49-4a1c-8bff-280b619fd3fc\") " pod="openshift-dns/node-resolver-dhqz6" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.139323 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjkdq\" (UniqueName: \"kubernetes.io/projected/11b09917-f546-487b-b78a-9b66af1ab00f-kube-api-access-qjkdq\") pod \"node-ca-wq76d\" (UID: \"11b09917-f546-487b-b78a-9b66af1ab00f\") " pod="openshift-image-registry/node-ca-wq76d" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.139614 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.139798 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.139949 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.140054 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.140080 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.140210 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.153051 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202513 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202555 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-os-release\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202577 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-env-overrides\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202606 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202626 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-system-cni-dir\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202645 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-cnibin\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202668 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-netns\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202690 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-var-lib-openvswitch\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202699 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-system-cni-dir\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202712 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6m6d\" (UniqueName: \"kubernetes.io/projected/45ef29bb-34fc-400a-93f2-c75d9470c9b8-kube-api-access-z6m6d\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202734 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-ovn-kubernetes\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202739 
4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-os-release\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202727 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-cnibin\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202760 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t5vx\" (UniqueName: \"kubernetes.io/projected/cf614e8e-1f39-41c7-8325-b57f028af887-kube-api-access-7t5vx\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202829 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-node-log\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202874 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/75bb7009-deac-407d-901d-035c51914a8a-proxy-tls\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202891 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-ovn\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202933 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-netd\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.202996 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-kubelet\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203026 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-etc-openvswitch\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc 
kubenswrapper[4763]: I1206 08:12:21.203075 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86wnp\" (UniqueName: \"kubernetes.io/projected/75bb7009-deac-407d-901d-035c51914a8a-kube-api-access-86wnp\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203096 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/75bb7009-deac-407d-901d-035c51914a8a-mcd-auth-proxy-config\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203150 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-tuning-conf-dir\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203189 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/75bb7009-deac-407d-901d-035c51914a8a-rootfs\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203210 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs\") pod \"network-metrics-daemon-p2rk6\" (UID: \"dcd65fd5-43dc-42a9-84d9-e37bb8e220af\") " pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203231 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-openvswitch\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203269 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovn-node-metrics-cert\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203295 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203317 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-etc-cvo-updatepayloads\") pod 
\"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203340 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-slash\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203362 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-bin\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203388 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/cf614e8e-1f39-41c7-8325-b57f028af887-cni-binary-copy\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203405 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/75bb7009-deac-407d-901d-035c51914a8a-rootfs\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203418 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203440 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/cf614e8e-1f39-41c7-8325-b57f028af887-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: E1206 08:12:21.203467 4763 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 08:12:21 crc kubenswrapper[4763]: E1206 08:12:21.203504 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs podName:dcd65fd5-43dc-42a9-84d9-e37bb8e220af nodeName:}" failed. No retries permitted until 2025-12-06 08:12:21.703491442 +0000 UTC m=+24.279196470 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs") pod "network-metrics-daemon-p2rk6" (UID: "dcd65fd5-43dc-42a9-84d9-e37bb8e220af") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203514 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/cf614e8e-1f39-41c7-8325-b57f028af887-tuning-conf-dir\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203556 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203582 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203587 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-script-lib\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203613 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzm5w\" (UniqueName: \"kubernetes.io/projected/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-kube-api-access-xzm5w\") pod \"network-metrics-daemon-p2rk6\" (UID: \"dcd65fd5-43dc-42a9-84d9-e37bb8e220af\") " pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203634 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-systemd-units\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203649 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/75bb7009-deac-407d-901d-035c51914a8a-mcd-auth-proxy-config\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203661 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-config\") pod \"ovnkube-node-5lcfn\" (UID: 
\"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203565 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203681 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-systemd\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203715 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-log-socket\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.203831 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.204097 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/cf614e8e-1f39-41c7-8325-b57f028af887-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.204308 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/cf614e8e-1f39-41c7-8325-b57f028af887-cni-binary-copy\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.206352 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.208304 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/75bb7009-deac-407d-901d-035c51914a8a-proxy-tls\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.210596 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-dhqz6" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.222283 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-wq76d" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.231821 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzm5w\" (UniqueName: \"kubernetes.io/projected/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-kube-api-access-xzm5w\") pod \"network-metrics-daemon-p2rk6\" (UID: \"dcd65fd5-43dc-42a9-84d9-e37bb8e220af\") " pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.233955 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86wnp\" (UniqueName: \"kubernetes.io/projected/75bb7009-deac-407d-901d-035c51914a8a-kube-api-access-86wnp\") pod \"machine-config-daemon-np59r\" (UID: \"75bb7009-deac-407d-901d-035c51914a8a\") " pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.236526 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t5vx\" (UniqueName: \"kubernetes.io/projected/cf614e8e-1f39-41c7-8325-b57f028af887-kube-api-access-7t5vx\") pod \"multus-additional-cni-plugins-859jt\" (UID: \"cf614e8e-1f39-41c7-8325-b57f028af887\") " pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.241274 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a34acc63-cca7-4fd8-8f36-4b6d74178dd1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-2k2lg\" (UID: \"a34acc63-cca7-4fd8-8f36-4b6d74178dd1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.301305 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-vm2pv" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304188 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovn-node-metrics-cert\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304237 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-openvswitch\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304265 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-slash\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304288 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-bin\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304314 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304335 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-script-lib\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304356 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-systemd-units\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304360 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-slash\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304378 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-config\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304411 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-systemd\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304438 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-log-socket\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304434 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-bin\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304462 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-env-overrides\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304461 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-openvswitch\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304502 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-var-lib-openvswitch\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304524 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6m6d\" (UniqueName: \"kubernetes.io/projected/45ef29bb-34fc-400a-93f2-c75d9470c9b8-kube-api-access-z6m6d\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304536 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-systemd\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304553 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-netns\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304572 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-ovn-kubernetes\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304579 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-log-socket\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304590 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-node-log\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304605 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-ovn\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304614 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304645 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-var-lib-openvswitch\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304647 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-netd\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304621 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-netd\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304677 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-systemd-units\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304695 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" 
(UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-kubelet\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304713 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-ovn-kubernetes\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304736 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-etc-openvswitch\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304800 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-etc-openvswitch\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304844 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-node-log\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304863 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-ovn\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304882 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-kubelet\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.304905 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-netns\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.305024 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-env-overrides\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.305123 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-config\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.305281 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-script-lib\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.311856 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovn-node-metrics-cert\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.317237 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-859jt" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.325169 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.331298 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6m6d\" (UniqueName: \"kubernetes.io/projected/45ef29bb-34fc-400a-93f2-c75d9470c9b8-kube-api-access-z6m6d\") pod \"ovnkube-node-5lcfn\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.334660 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.454882 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.531042 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld"] Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.531789 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.534539 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.534671 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.607113 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c46gt\" (UniqueName: \"kubernetes.io/projected/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-kube-api-access-c46gt\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.607158 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.607200 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.607223 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.708343 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs\") pod \"network-metrics-daemon-p2rk6\" (UID: \"dcd65fd5-43dc-42a9-84d9-e37bb8e220af\") " pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.708381 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.708407 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 
crc kubenswrapper[4763]: I1206 08:12:21.708434 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c46gt\" (UniqueName: \"kubernetes.io/projected/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-kube-api-access-c46gt\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.708469 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: E1206 08:12:21.708544 4763 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 08:12:21 crc kubenswrapper[4763]: E1206 08:12:21.708610 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs podName:dcd65fd5-43dc-42a9-84d9-e37bb8e220af nodeName:}" failed. No retries permitted until 2025-12-06 08:12:22.708590893 +0000 UTC m=+25.284295931 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs") pod "network-metrics-daemon-p2rk6" (UID: "dcd65fd5-43dc-42a9-84d9-e37bb8e220af") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.709120 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.709120 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.711467 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.731491 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c46gt\" (UniqueName: \"kubernetes.io/projected/2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd-kube-api-access-c46gt\") pod \"ovnkube-control-plane-749d76644c-6dgld\" (UID: \"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.848979 4763 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.859255 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"0dbee0d0b59cde221d9709f6b7debeb4c2ba83844c9a65c540977f175a6deeff"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.859298 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"a1256d4f047003ba6e45d47bdb93343aa51db47f3443f45b14e5254ef56ca361"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.859314 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"c34648e02d715c92c4c8e71e076f98fa89d627b6ef9f20e88a350f9b1a0b2484"} Dec 06 08:12:21 crc kubenswrapper[4763]: W1206 08:12:21.860641 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2cf3552f_e5d3_4df7_a1bf_e5bee00ac9bd.slice/crio-b41d0848628005a4f4027d99c934281a5348172a8663a0e018989c14795b99a0 WatchSource:0}: Error finding container b41d0848628005a4f4027d99c934281a5348172a8663a0e018989c14795b99a0: Status 404 returned error can't find the container with id b41d0848628005a4f4027d99c934281a5348172a8663a0e018989c14795b99a0 Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.861120 4763 generic.go:334] "Generic (PLEG): container finished" podID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerID="0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f" exitCode=0 Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.861170 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerDied","Data":"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.861190 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerStarted","Data":"06a74ed727c8e7de3a1553d52aa00788badbf51bb62410c10d6e4089437ab9c7"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.863990 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859jt" event={"ID":"cf614e8e-1f39-41c7-8325-b57f028af887","Type":"ContainerStarted","Data":"9e406c0fd5ba587d3b3a8da40c785e207ede01455b836aeb766876f94b1f43f9"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.864020 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859jt" event={"ID":"cf614e8e-1f39-41c7-8325-b57f028af887","Type":"ContainerStarted","Data":"b8c02648068fdd03922166eab6dee020aa28a5bde88346e7630f30efce474d6b"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.865485 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dhqz6" event={"ID":"f30d9eee-4b49-4a1c-8bff-280b619fd3fc","Type":"ContainerStarted","Data":"e87e76e943d6064ad929c5e73ec50751dc83ea5e87b69395a9f61e011e414f58"} Dec 06 08:12:21 crc 
kubenswrapper[4763]: I1206 08:12:21.865518 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dhqz6" event={"ID":"f30d9eee-4b49-4a1c-8bff-280b619fd3fc","Type":"ContainerStarted","Data":"4c4f8b525954fd4e212aa562e6e1b021136c88f9a6777e4de03bd569987982d9"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.867031 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm2pv" event={"ID":"8dc562e3-8b35-4486-8731-dc26218daf86","Type":"ContainerStarted","Data":"2306e1b8490b106c47ba5d9c90a11e5123ee0a441a0f62b347eafd32c269e9d6"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.867071 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm2pv" event={"ID":"8dc562e3-8b35-4486-8731-dc26218daf86","Type":"ContainerStarted","Data":"4af3f83c1e915f9caacd86f7b6779d1d7a4c013a93e7ac22db82b5f024504682"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.868398 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-wq76d" event={"ID":"11b09917-f546-487b-b78a-9b66af1ab00f","Type":"ContainerStarted","Data":"93b945f3522eacab58ae6425bd0c7737c5b8dd095dcc5c389cefeff7f696d660"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.868427 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-wq76d" event={"ID":"11b09917-f546-487b-b78a-9b66af1ab00f","Type":"ContainerStarted","Data":"a031ff407950c57efafc02ae51506faebc96de9fd87fdc7c7ad945fadedba112"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.870471 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" event={"ID":"a34acc63-cca7-4fd8-8f36-4b6d74178dd1","Type":"ContainerStarted","Data":"979f020efe730b3a7d6a297f8421938b7d88877d0d58897262ee0ce715109a35"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.870519 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" event={"ID":"a34acc63-cca7-4fd8-8f36-4b6d74178dd1","Type":"ContainerStarted","Data":"8e952e538486638e1a34dec14237ed2ac8dbba23da7556009b6a03f73273c9d2"} Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.879638 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podStartSLOduration=1.879614592 podStartE2EDuration="1.879614592s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:21.877533136 +0000 UTC m=+24.453238194" watchObservedRunningTime="2025-12-06 08:12:21.879614592 +0000 UTC m=+24.455319640" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.896148 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-vm2pv" podStartSLOduration=1.896127168 podStartE2EDuration="1.896127168s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:21.895924303 +0000 UTC m=+24.471629341" watchObservedRunningTime="2025-12-06 08:12:21.896127168 +0000 UTC m=+24.471832206" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.946496 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2k2lg" podStartSLOduration=1.9464749380000002 podStartE2EDuration="1.946474938s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:21.946270193 +0000 UTC m=+24.521975231" watchObservedRunningTime="2025-12-06 08:12:21.946474938 +0000 UTC m=+24.522179986" Dec 06 08:12:21 crc kubenswrapper[4763]: I1206 08:12:21.973492 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-dhqz6" podStartSLOduration=1.973473577 podStartE2EDuration="1.973473577s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:21.957887526 +0000 UTC m=+24.533592564" watchObservedRunningTime="2025-12-06 08:12:21.973473577 +0000 UTC m=+24.549178635" Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.000831 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-wq76d" podStartSLOduration=2.000809256 podStartE2EDuration="2.000809256s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:21.973690423 +0000 UTC m=+24.549395471" watchObservedRunningTime="2025-12-06 08:12:22.000809256 +0000 UTC m=+24.576514314" Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.718936 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.719044 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.719040 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.719099 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.719235 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs\") pod \"network-metrics-daemon-p2rk6\" (UID: \"dcd65fd5-43dc-42a9-84d9-e37bb8e220af\") " pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:22 crc kubenswrapper[4763]: E1206 08:12:22.719472 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 08:12:22 crc kubenswrapper[4763]: E1206 08:12:22.719355 4763 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 08:12:22 crc kubenswrapper[4763]: E1206 08:12:22.719550 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs podName:dcd65fd5-43dc-42a9-84d9-e37bb8e220af nodeName:}" failed. No retries permitted until 2025-12-06 08:12:24.719536747 +0000 UTC m=+27.295241785 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs") pod "network-metrics-daemon-p2rk6" (UID: "dcd65fd5-43dc-42a9-84d9-e37bb8e220af") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 08:12:22 crc kubenswrapper[4763]: E1206 08:12:22.719620 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p2rk6" podUID="dcd65fd5-43dc-42a9-84d9-e37bb8e220af" Dec 06 08:12:22 crc kubenswrapper[4763]: E1206 08:12:22.719716 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 08:12:22 crc kubenswrapper[4763]: E1206 08:12:22.719783 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.874488 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" event={"ID":"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd","Type":"ContainerStarted","Data":"a6006f90b34756deef4a9fcc73fae8e7d490a331b3377d2250de501087432e90"} Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.874543 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" event={"ID":"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd","Type":"ContainerStarted","Data":"36a7305f813d314abaa77ab26ca7877fb7f7bc5966b93efc62a10e818da5f38f"} Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.874559 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" event={"ID":"2cf3552f-e5d3-4df7-a1bf-e5bee00ac9bd","Type":"ContainerStarted","Data":"b41d0848628005a4f4027d99c934281a5348172a8663a0e018989c14795b99a0"} Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.875603 4763 generic.go:334] "Generic (PLEG): container finished" podID="cf614e8e-1f39-41c7-8325-b57f028af887" containerID="9e406c0fd5ba587d3b3a8da40c785e207ede01455b836aeb766876f94b1f43f9" exitCode=0 Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.875664 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859jt" event={"ID":"cf614e8e-1f39-41c7-8325-b57f028af887","Type":"ContainerDied","Data":"9e406c0fd5ba587d3b3a8da40c785e207ede01455b836aeb766876f94b1f43f9"} Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.885062 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerStarted","Data":"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255"} Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.885117 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerStarted","Data":"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56"} Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.885131 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerStarted","Data":"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a"} Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.885142 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerStarted","Data":"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29"} Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.885156 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerStarted","Data":"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660"} Dec 06 08:12:22 crc kubenswrapper[4763]: I1206 08:12:22.896209 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6dgld" 
podStartSLOduration=1.8961947179999998 podStartE2EDuration="1.896194718s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:22.89516209 +0000 UTC m=+25.470867129" watchObservedRunningTime="2025-12-06 08:12:22.896194718 +0000 UTC m=+25.471899756" Dec 06 08:12:23 crc kubenswrapper[4763]: I1206 08:12:23.890075 4763 generic.go:334] "Generic (PLEG): container finished" podID="cf614e8e-1f39-41c7-8325-b57f028af887" containerID="79b98bf5735f5fbdcaeca33f16504392f40db51667e15ca90b17f51926ca2d6e" exitCode=0 Dec 06 08:12:23 crc kubenswrapper[4763]: I1206 08:12:23.890151 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859jt" event={"ID":"cf614e8e-1f39-41c7-8325-b57f028af887","Type":"ContainerDied","Data":"79b98bf5735f5fbdcaeca33f16504392f40db51667e15ca90b17f51926ca2d6e"} Dec 06 08:12:23 crc kubenswrapper[4763]: I1206 08:12:23.895405 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerStarted","Data":"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9"} Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.537864 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.538292 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.538353 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.538487 4763 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.538554 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:32.538528414 +0000 UTC m=+35.114233462 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.538593 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:32.538583655 +0000 UTC m=+35.114288693 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.538677 4763 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.538729 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:32.538717819 +0000 UTC m=+35.114422857 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.639440 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.639489 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.639609 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.639623 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.639633 4763 
projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.639656 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.639684 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:32.639669266 +0000 UTC m=+35.215374304 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.639692 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.639706 4763 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.639757 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:32.639740228 +0000 UTC m=+35.215445266 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.719359 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.719416 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.719437 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.719495 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.719553 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.719719 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p2rk6" podUID="dcd65fd5-43dc-42a9-84d9-e37bb8e220af" Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.719837 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.719936 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.740928 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs\") pod \"network-metrics-daemon-p2rk6\" (UID: \"dcd65fd5-43dc-42a9-84d9-e37bb8e220af\") " pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.741037 4763 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 08:12:24 crc kubenswrapper[4763]: E1206 08:12:24.741084 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs podName:dcd65fd5-43dc-42a9-84d9-e37bb8e220af nodeName:}" failed. No retries permitted until 2025-12-06 08:12:28.741072074 +0000 UTC m=+31.316777112 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs") pod "network-metrics-daemon-p2rk6" (UID: "dcd65fd5-43dc-42a9-84d9-e37bb8e220af") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.900219 4763 generic.go:334] "Generic (PLEG): container finished" podID="cf614e8e-1f39-41c7-8325-b57f028af887" containerID="c52fb2caef8a6d99d09532c89c7562d84d76dfb7b3ce28558280bfe70bceef85" exitCode=0 Dec 06 08:12:24 crc kubenswrapper[4763]: I1206 08:12:24.900261 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859jt" event={"ID":"cf614e8e-1f39-41c7-8325-b57f028af887","Type":"ContainerDied","Data":"c52fb2caef8a6d99d09532c89c7562d84d76dfb7b3ce28558280bfe70bceef85"} Dec 06 08:12:25 crc kubenswrapper[4763]: I1206 08:12:25.907718 4763 generic.go:334] "Generic (PLEG): container finished" podID="cf614e8e-1f39-41c7-8325-b57f028af887" containerID="bf5329cb58863c806ccb2d8f9d250f9f014519a7d5cc01c81be5caff48422c90" exitCode=0 Dec 06 08:12:25 crc kubenswrapper[4763]: I1206 08:12:25.907818 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859jt" event={"ID":"cf614e8e-1f39-41c7-8325-b57f028af887","Type":"ContainerDied","Data":"bf5329cb58863c806ccb2d8f9d250f9f014519a7d5cc01c81be5caff48422c90"} Dec 06 08:12:25 crc kubenswrapper[4763]: I1206 08:12:25.913891 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerStarted","Data":"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09"} Dec 06 08:12:26 crc kubenswrapper[4763]: I1206 08:12:26.718667 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:26 crc kubenswrapper[4763]: I1206 08:12:26.718717 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:26 crc kubenswrapper[4763]: I1206 08:12:26.718764 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:26 crc kubenswrapper[4763]: E1206 08:12:26.718770 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 08:12:26 crc kubenswrapper[4763]: I1206 08:12:26.718857 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:26 crc kubenswrapper[4763]: E1206 08:12:26.719018 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-p2rk6" podUID="dcd65fd5-43dc-42a9-84d9-e37bb8e220af" Dec 06 08:12:26 crc kubenswrapper[4763]: E1206 08:12:26.719080 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 08:12:26 crc kubenswrapper[4763]: E1206 08:12:26.719131 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 08:12:26 crc kubenswrapper[4763]: I1206 08:12:26.920480 4763 generic.go:334] "Generic (PLEG): container finished" podID="cf614e8e-1f39-41c7-8325-b57f028af887" containerID="45b5fd7c7ad7311bdff43dabecb8319d4a94d3124f5f04ae9eeb438aeeb934b8" exitCode=0 Dec 06 08:12:26 crc kubenswrapper[4763]: I1206 08:12:26.920523 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859jt" event={"ID":"cf614e8e-1f39-41c7-8325-b57f028af887","Type":"ContainerDied","Data":"45b5fd7c7ad7311bdff43dabecb8319d4a94d3124f5f04ae9eeb438aeeb934b8"} Dec 06 08:12:27 crc kubenswrapper[4763]: I1206 08:12:27.768773 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:12:28 crc kubenswrapper[4763]: I1206 08:12:28.718503 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:28 crc kubenswrapper[4763]: I1206 08:12:28.718563 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:28 crc kubenswrapper[4763]: I1206 08:12:28.718610 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:28 crc kubenswrapper[4763]: I1206 08:12:28.718921 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:28 crc kubenswrapper[4763]: E1206 08:12:28.718949 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 08:12:28 crc kubenswrapper[4763]: E1206 08:12:28.719063 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 08:12:28 crc kubenswrapper[4763]: E1206 08:12:28.719201 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p2rk6" podUID="dcd65fd5-43dc-42a9-84d9-e37bb8e220af" Dec 06 08:12:28 crc kubenswrapper[4763]: E1206 08:12:28.719300 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 08:12:28 crc kubenswrapper[4763]: I1206 08:12:28.819689 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs\") pod \"network-metrics-daemon-p2rk6\" (UID: \"dcd65fd5-43dc-42a9-84d9-e37bb8e220af\") " pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:28 crc kubenswrapper[4763]: E1206 08:12:28.819864 4763 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 08:12:28 crc kubenswrapper[4763]: E1206 08:12:28.819964 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs podName:dcd65fd5-43dc-42a9-84d9-e37bb8e220af nodeName:}" failed. No retries permitted until 2025-12-06 08:12:36.819939676 +0000 UTC m=+39.395644724 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs") pod "network-metrics-daemon-p2rk6" (UID: "dcd65fd5-43dc-42a9-84d9-e37bb8e220af") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 06 08:12:28 crc kubenswrapper[4763]: I1206 08:12:28.933989 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859jt" event={"ID":"cf614e8e-1f39-41c7-8325-b57f028af887","Type":"ContainerStarted","Data":"eab8804ad99146dc08658e78206d2b928fb86ff5f5b167c6d60bb5e17305bfeb"} Dec 06 08:12:29 crc kubenswrapper[4763]: I1206 08:12:29.939955 4763 generic.go:334] "Generic (PLEG): container finished" podID="cf614e8e-1f39-41c7-8325-b57f028af887" containerID="eab8804ad99146dc08658e78206d2b928fb86ff5f5b167c6d60bb5e17305bfeb" exitCode=0 Dec 06 08:12:29 crc kubenswrapper[4763]: I1206 08:12:29.940068 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859jt" event={"ID":"cf614e8e-1f39-41c7-8325-b57f028af887","Type":"ContainerDied","Data":"eab8804ad99146dc08658e78206d2b928fb86ff5f5b167c6d60bb5e17305bfeb"} Dec 06 08:12:29 crc kubenswrapper[4763]: I1206 08:12:29.948616 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerStarted","Data":"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb"} Dec 06 08:12:29 crc kubenswrapper[4763]: I1206 08:12:29.948845 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:29 crc kubenswrapper[4763]: I1206 08:12:29.948868 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:29 crc kubenswrapper[4763]: I1206 08:12:29.948877 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:29 crc kubenswrapper[4763]: I1206 08:12:29.975764 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:29 crc kubenswrapper[4763]: I1206 08:12:29.978090 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:30 crc kubenswrapper[4763]: I1206 08:12:30.027663 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" podStartSLOduration=9.027637814 podStartE2EDuration="9.027637814s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:29.997465168 +0000 UTC m=+32.573170216" watchObservedRunningTime="2025-12-06 08:12:30.027637814 +0000 UTC m=+32.603342862" Dec 06 08:12:30 crc kubenswrapper[4763]: I1206 08:12:30.719259 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:30 crc kubenswrapper[4763]: E1206 08:12:30.719407 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 08:12:30 crc kubenswrapper[4763]: I1206 08:12:30.719287 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:30 crc kubenswrapper[4763]: E1206 08:12:30.719503 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 08:12:30 crc kubenswrapper[4763]: I1206 08:12:30.719274 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:30 crc kubenswrapper[4763]: E1206 08:12:30.719583 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p2rk6" podUID="dcd65fd5-43dc-42a9-84d9-e37bb8e220af" Dec 06 08:12:30 crc kubenswrapper[4763]: I1206 08:12:30.719744 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:30 crc kubenswrapper[4763]: E1206 08:12:30.719918 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 08:12:30 crc kubenswrapper[4763]: I1206 08:12:30.955618 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859jt" event={"ID":"cf614e8e-1f39-41c7-8325-b57f028af887","Type":"ContainerStarted","Data":"d53bf1d747c375018b8a19a613b5b05383ed9d3f82b087c63ab1bac16d062a6f"} Dec 06 08:12:30 crc kubenswrapper[4763]: I1206 08:12:30.980118 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-859jt" podStartSLOduration=10.980099377 podStartE2EDuration="10.980099377s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:30.979132611 +0000 UTC m=+33.554837659" watchObservedRunningTime="2025-12-06 08:12:30.980099377 +0000 UTC m=+33.555804415" Dec 06 08:12:30 crc kubenswrapper[4763]: I1206 08:12:30.997811 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-p2rk6"] Dec 06 08:12:30 crc kubenswrapper[4763]: I1206 08:12:30.998135 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:30 crc kubenswrapper[4763]: E1206 08:12:30.998267 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p2rk6" podUID="dcd65fd5-43dc-42a9-84d9-e37bb8e220af" Dec 06 08:12:32 crc kubenswrapper[4763]: I1206 08:12:32.568079 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.568245 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:48.568221929 +0000 UTC m=+51.143926967 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:32 crc kubenswrapper[4763]: I1206 08:12:32.568377 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:32 crc kubenswrapper[4763]: I1206 08:12:32.568404 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.568472 4763 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.568530 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:48.568522737 +0000 UTC m=+51.144227775 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.568555 4763 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.568619 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:48.568599189 +0000 UTC m=+51.144304227 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 06 08:12:32 crc kubenswrapper[4763]: I1206 08:12:32.669629 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:32 crc kubenswrapper[4763]: I1206 08:12:32.669692 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.670201 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.670356 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.670387 4763 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.670594 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:48.670570203 +0000 UTC m=+51.246275261 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.674213 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.674298 4763 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.674319 4763 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.674419 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-06 08:12:48.674395337 +0000 UTC m=+51.250100375 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 06 08:12:32 crc kubenswrapper[4763]: I1206 08:12:32.719059 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:32 crc kubenswrapper[4763]: I1206 08:12:32.719109 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:32 crc kubenswrapper[4763]: I1206 08:12:32.719066 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.719179 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 06 08:12:32 crc kubenswrapper[4763]: I1206 08:12:32.719195 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.719274 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p2rk6" podUID="dcd65fd5-43dc-42a9-84d9-e37bb8e220af" Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.719336 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 06 08:12:32 crc kubenswrapper[4763]: E1206 08:12:32.719373 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.945452 4763 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.945601 4763 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.979458 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lrzmx"] Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.979839 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.985053 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6"] Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.985586 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-w4ffb"] Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.985947 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lfzzr"] Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.986261 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.987483 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.990294 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.990476 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.992347 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.993984 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l9dxz"] Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.994505 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd"] Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.994735 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:33 crc kubenswrapper[4763]: I1206 08:12:33.994956 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.014473 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.018205 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.033101 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.033191 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.033287 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.033355 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.033411 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.033477 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.033517 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.033552 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.033618 4763 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.033365 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.035685 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.035808 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.035920 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.036462 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.037334 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7s62v"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.037738 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.037834 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.039557 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.039889 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.040032 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.040210 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.040039 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.041390 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.041750 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.041953 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.041973 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.042122 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 06 08:12:34 crc 
kubenswrapper[4763]: I1206 08:12:34.042979 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.046473 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.046550 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.046673 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.046767 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.046964 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.047072 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.047181 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.047301 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.047459 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.047602 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.047659 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.047730 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.047792 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.047836 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.050362 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.055800 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.056278 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.057141 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.057326 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.057565 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.057661 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8qvr9"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.058129 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.058430 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-4xfrk"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.058751 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-4xfrk" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.058931 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.058970 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.059166 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.059608 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.058931 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.059887 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.059987 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.059623 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.062454 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vlddl"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.062803 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.063086 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.063266 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.063398 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.063472 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-nzw6k"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.063881 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.064753 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.082927 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.083171 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.083343 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.083563 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.083606 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.083724 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.083860 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.084457 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.085002 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.085321 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.085567 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.085804 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.086235 4763 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.111716 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.112540 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.112719 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113170 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2c40e17b-58b1-423e-9d70-c29ec900e983-audit-policies\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113201 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-image-import-ca\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113222 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113241 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-node-pullsecrets\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113260 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2c40e17b-58b1-423e-9d70-c29ec900e983-audit-dir\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113275 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-policies\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113295 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113310 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g4dd\" (UniqueName: \"kubernetes.io/projected/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-kube-api-access-8g4dd\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113327 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113343 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2c40e17b-58b1-423e-9d70-c29ec900e983-etcd-client\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113372 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q2qn\" (UniqueName: \"kubernetes.io/projected/66731e3f-7796-4ca9-a290-0b1f8ce568c6-kube-api-access-8q2qn\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113389 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-config\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113404 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113421 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66731e3f-7796-4ca9-a290-0b1f8ce568c6-serving-cert\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113438 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-serving-cert\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113453 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113468 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113486 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6450aca-625d-4980-b576-8e24a98b87d8-serving-cert\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113509 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113523 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2c40e17b-58b1-423e-9d70-c29ec900e983-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113540 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nlch\" (UniqueName: \"kubernetes.io/projected/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-kube-api-access-2nlch\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113555 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-dir\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113577 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-client-ca\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113592 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113607 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-client-ca\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113630 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-config\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113647 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-etcd-client\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113665 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2n2zr\" (UniqueName: \"kubernetes.io/projected/c6450aca-625d-4980-b576-8e24a98b87d8-kube-api-access-2n2zr\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113681 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-config\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113700 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c40e17b-58b1-423e-9d70-c29ec900e983-serving-cert\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113717 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113732 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c40e17b-58b1-423e-9d70-c29ec900e983-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113748 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-audit-dir\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113764 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113778 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-config\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113799 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113802 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113816 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-serving-cert\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113832 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 
08:12:34.113851 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92zxz\" (UniqueName: \"kubernetes.io/projected/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-kube-api-access-92zxz\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113867 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113880 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2c40e17b-58b1-423e-9d70-c29ec900e983-encryption-config\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113920 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-service-ca-bundle\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113935 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-etcd-serving-ca\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113953 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-encryption-config\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113968 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113978 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.113992 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hwfm\" (UniqueName: \"kubernetes.io/projected/2c40e17b-58b1-423e-9d70-c29ec900e983-kube-api-access-6hwfm\") pod 
\"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.114007 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-audit\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.114108 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.114656 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.115990 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.116075 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.116176 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.116235 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.116321 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.116530 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.116869 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.117017 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.117475 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.117624 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.117695 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.117816 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.118391 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.118499 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.118543 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.118644 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.118694 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.118805 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.118857 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.119130 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.117948 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.119937 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-7j4g2"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.120545 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.120631 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.121109 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.121237 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.121311 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.121486 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.121684 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.124299 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-mt26w"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.124613 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.124800 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.125086 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.125256 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.125383 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.125475 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.125563 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.125628 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.126201 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.126800 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.132218 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.133107 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.133387 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.133882 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.134999 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.135702 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.136808 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.138053 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.138750 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.138986 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.139363 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.140332 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.144016 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.144979 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dbhnr"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.146138 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.146165 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.153276 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-p68hx"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.172698 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.173315 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.174160 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.182273 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rppzh"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.183563 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.183638 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.184218 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.205061 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.205279 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.210942 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.211628 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.211947 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.212048 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.212384 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.212484 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.213256 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.214080 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.214614 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.215021 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-serving-cert\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.215170 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.215264 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.215350 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6450aca-625d-4980-b576-8e24a98b87d8-serving-cert\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.215624 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.216842 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.217233 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.217693 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2c40e17b-58b1-423e-9d70-c29ec900e983-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.215887 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2c40e17b-58b1-423e-9d70-c29ec900e983-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.218415 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrf7f\" (UniqueName: \"kubernetes.io/projected/75bdca6d-084a-4898-bf07-5371cf477720-kube-api-access-qrf7f\") pod \"openshift-config-operator-7777fb866f-4gtpp\" (UID: \"75bdca6d-084a-4898-bf07-5371cf477720\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.218518 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nlch\" (UniqueName: \"kubernetes.io/projected/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-kube-api-access-2nlch\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.218623 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-dir\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.218729 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-client-ca\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.218826 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.218981 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-client-ca\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.219131 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-config\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.218682 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-dir\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.220016 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-client-ca\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.220379 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6450aca-625d-4980-b576-8e24a98b87d8-serving-cert\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.220701 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-config\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.220876 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-qvrcf\" (UID: \"29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.221047 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-etcd-client\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.221772 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2n2zr\" (UniqueName: \"kubernetes.io/projected/c6450aca-625d-4980-b576-8e24a98b87d8-kube-api-access-2n2zr\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.220935 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.221319 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.222074 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-client-ca\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.222008 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-config\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.222427 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c40e17b-58b1-423e-9d70-c29ec900e983-serving-cert\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.222556 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.222701 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c40e17b-58b1-423e-9d70-c29ec900e983-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.222849 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-images\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.223005 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/75bdca6d-084a-4898-bf07-5371cf477720-available-featuregates\") pod \"openshift-config-operator-7777fb866f-4gtpp\" (UID: 
\"75bdca6d-084a-4898-bf07-5371cf477720\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.223263 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-qvrcf\" (UID: \"29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.223386 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-audit-dir\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.223506 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-audit-dir\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.223282 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-config\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.222361 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.223518 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.223828 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-config\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.223948 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.224058 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-serving-cert\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.225085 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.225282 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.225390 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g85cl\" (UniqueName: \"kubernetes.io/projected/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-kube-api-access-g85cl\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.225465 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92zxz\" (UniqueName: \"kubernetes.io/projected/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-kube-api-access-92zxz\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.224501 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.225594 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-config\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.224840 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c40e17b-58b1-423e-9d70-c29ec900e983-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.224709 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 
08:12:34.225805 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c40e17b-58b1-423e-9d70-c29ec900e983-serving-cert\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.225918 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.226005 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2c40e17b-58b1-423e-9d70-c29ec900e983-encryption-config\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.226074 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmpjf\" (UniqueName: \"kubernetes.io/projected/29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0-kube-api-access-xmpjf\") pod \"openshift-apiserver-operator-796bbdcf4f-qvrcf\" (UID: \"29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.226576 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-service-ca-bundle\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.226732 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-etcd-serving-ca\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.226801 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-encryption-config\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.226928 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227046 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hwfm\" (UniqueName: 
\"kubernetes.io/projected/2c40e17b-58b1-423e-9d70-c29ec900e983-kube-api-access-6hwfm\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227114 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-audit\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227177 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75bdca6d-084a-4898-bf07-5371cf477720-serving-cert\") pod \"openshift-config-operator-7777fb866f-4gtpp\" (UID: \"75bdca6d-084a-4898-bf07-5371cf477720\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227257 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2c40e17b-58b1-423e-9d70-c29ec900e983-audit-policies\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227324 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-image-import-ca\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227387 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227455 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-node-pullsecrets\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227517 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2c40e17b-58b1-423e-9d70-c29ec900e983-audit-dir\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227587 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-policies\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: 
I1206 08:12:34.227650 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g4dd\" (UniqueName: \"kubernetes.io/projected/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-kube-api-access-8g4dd\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227710 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227771 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227853 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2c40e17b-58b1-423e-9d70-c29ec900e983-etcd-client\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227948 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-config\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.228020 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q2qn\" (UniqueName: \"kubernetes.io/projected/66731e3f-7796-4ca9-a290-0b1f8ce568c6-kube-api-access-8q2qn\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.228125 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-config\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.228216 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.228305 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/66731e3f-7796-4ca9-a290-0b1f8ce568c6-serving-cert\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.230281 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-etcd-serving-ca\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.230443 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2c40e17b-58b1-423e-9d70-c29ec900e983-audit-dir\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.230515 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-config\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.230572 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-etcd-client\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.230810 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-image-import-ca\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.227270 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-service-ca-bundle\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.230876 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-node-pullsecrets\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.230978 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-audit\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.230992 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" 
(UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.231634 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2c40e17b-58b1-423e-9d70-c29ec900e983-audit-policies\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.231756 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-policies\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.232645 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-serving-cert\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.233081 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.233759 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.234877 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gxwtv"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.235216 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2c40e17b-58b1-423e-9d70-c29ec900e983-encryption-config\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.235704 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.235715 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-bf86d"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.236399 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.236702 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2c40e17b-58b1-423e-9d70-c29ec900e983-etcd-client\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.237060 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.237105 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.241017 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-5wtjb"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.241166 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.239570 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-encryption-config\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.237138 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.239422 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-serving-cert\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.242293 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66731e3f-7796-4ca9-a290-0b1f8ce568c6-serving-cert\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.242474 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.242872 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lrzmx"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.242918 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-w4ffb"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.243031 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-5wtjb" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.245063 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.245103 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lfzzr"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.245469 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.248582 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-nzw6k"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.248622 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.248633 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8qvr9"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.252533 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.253692 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.254657 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vlddl"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.255612 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.256515 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4xfrk"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.258875 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-2df96"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.259440 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-ln8pp"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.259700 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-2df96" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.259807 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.259886 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7s62v"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.260777 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.261640 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-8m2kx"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.261996 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.262853 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.262857 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-p68hx"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.263466 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rppzh"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.264496 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.265399 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l9dxz"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.266553 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.267821 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.269474 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-7j4g2"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.275246 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.278291 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gxwtv"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.281291 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.282149 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.282438 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.284627 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.285649 4763 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.286778 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.287808 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.288893 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dbhnr"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.289996 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.291113 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-bf86d"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.292205 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.293402 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-b9lvw"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.294714 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.294828 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.295746 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-8m2kx"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.296962 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-2df96"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.298132 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.299436 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-b9lvw"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.300698 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm"] Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.302016 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.322397 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.329626 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrf7f\" (UniqueName: \"kubernetes.io/projected/75bdca6d-084a-4898-bf07-5371cf477720-kube-api-access-qrf7f\") pod \"openshift-config-operator-7777fb866f-4gtpp\" (UID: \"75bdca6d-084a-4898-bf07-5371cf477720\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:34 crc 
kubenswrapper[4763]: I1206 08:12:34.329694 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-qvrcf\" (UID: \"29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.329721 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-images\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.329754 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/75bdca6d-084a-4898-bf07-5371cf477720-available-featuregates\") pod \"openshift-config-operator-7777fb866f-4gtpp\" (UID: \"75bdca6d-084a-4898-bf07-5371cf477720\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.329771 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-qvrcf\" (UID: \"29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.329788 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.329804 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g85cl\" (UniqueName: \"kubernetes.io/projected/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-kube-api-access-g85cl\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.329841 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmpjf\" (UniqueName: \"kubernetes.io/projected/29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0-kube-api-access-xmpjf\") pod \"openshift-apiserver-operator-796bbdcf4f-qvrcf\" (UID: \"29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.329935 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75bdca6d-084a-4898-bf07-5371cf477720-serving-cert\") pod \"openshift-config-operator-7777fb866f-4gtpp\" (UID: \"75bdca6d-084a-4898-bf07-5371cf477720\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.329964 4763 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-config\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.330526 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/75bdca6d-084a-4898-bf07-5371cf477720-available-featuregates\") pod \"openshift-config-operator-7777fb866f-4gtpp\" (UID: \"75bdca6d-084a-4898-bf07-5371cf477720\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.331131 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-config\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.331130 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-qvrcf\" (UID: \"29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.331332 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-images\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.333213 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.333570 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75bdca6d-084a-4898-bf07-5371cf477720-serving-cert\") pod \"openshift-config-operator-7777fb866f-4gtpp\" (UID: \"75bdca6d-084a-4898-bf07-5371cf477720\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.334333 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-qvrcf\" (UID: \"29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.341969 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.370059 4763 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"trusted-ca-bundle" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.381875 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.402070 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.422374 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.443319 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.462017 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.502880 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.522562 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.542248 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.561814 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 06 08:12:34 crc kubenswrapper[4763]: I1206 08:12:34.581933 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.954539 4763 request.go:700] Waited for 1.711238849s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-server-tls&limit=500&resourceVersion=0 Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.957782 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7d3f1284-20c9-4aa5-9c45-3cc96943980c-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.957823 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2d9bc36e-6135-431d-9eac-eae00ee40a18-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-w8f9q\" (UID: \"2d9bc36e-6135-431d-9eac-eae00ee40a18\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.957847 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/041c7ad7-43e5-4dfc-898f-845db4f5fa2e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-w55pc\" (UID: \"041c7ad7-43e5-4dfc-898f-845db4f5fa2e\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.957866 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/041c7ad7-43e5-4dfc-898f-845db4f5fa2e-config\") pod \"kube-controller-manager-operator-78b949d7b-w55pc\" (UID: \"041c7ad7-43e5-4dfc-898f-845db4f5fa2e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.957886 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2408364-68f3-4d8d-9cce-22a25d841f6d-config\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.957930 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-etcd-service-ca\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.957950 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-trusted-ca\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.957971 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-serving-cert\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.957990 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-etcd-client\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958024 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d2408364-68f3-4d8d-9cce-22a25d841f6d-machine-approver-tls\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958057 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-bound-sa-token\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 
06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958083 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d2408364-68f3-4d8d-9cce-22a25d841f6d-auth-proxy-config\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958102 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/002fe870-57dd-4d32-ad54-a093ca95c088-metrics-tls\") pod \"dns-operator-744455d44c-8qvr9\" (UID: \"002fe870-57dd-4d32-ad54-a093ca95c088\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958126 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrld8\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-kube-api-access-hrld8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958148 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d9bc36e-6135-431d-9eac-eae00ee40a18-config\") pod \"kube-apiserver-operator-766d6c64bb-w8f9q\" (UID: \"2d9bc36e-6135-431d-9eac-eae00ee40a18\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958190 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-tls\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958229 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-certificates\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958246 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7d3f1284-20c9-4aa5-9c45-3cc96943980c-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958274 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlsbd\" (UniqueName: \"kubernetes.io/projected/d2408364-68f3-4d8d-9cce-22a25d841f6d-kube-api-access-dlsbd\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:35 crc 
kubenswrapper[4763]: I1206 08:12:35.958366 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958491 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcp88\" (UniqueName: \"kubernetes.io/projected/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-kube-api-access-jcp88\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958518 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krhxz\" (UniqueName: \"kubernetes.io/projected/69142441-e9e9-483d-b8ea-a6ad02792eab-kube-api-access-krhxz\") pod \"downloads-7954f5f757-4xfrk\" (UID: \"69142441-e9e9-483d-b8ea-a6ad02792eab\") " pod="openshift-console/downloads-7954f5f757-4xfrk" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958571 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-config\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958597 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2d9bc36e-6135-431d-9eac-eae00ee40a18-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-w8f9q\" (UID: \"2d9bc36e-6135-431d-9eac-eae00ee40a18\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958627 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/041c7ad7-43e5-4dfc-898f-845db4f5fa2e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-w55pc\" (UID: \"041c7ad7-43e5-4dfc-898f-845db4f5fa2e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958656 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sptsz\" (UniqueName: \"kubernetes.io/projected/002fe870-57dd-4d32-ad54-a093ca95c088-kube-api-access-sptsz\") pod \"dns-operator-744455d44c-8qvr9\" (UID: \"002fe870-57dd-4d32-ad54-a093ca95c088\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958752 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.958821 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-etcd-ca\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.961120 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.961653 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.962509 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.962889 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.963163 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.963171 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.963476 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.963784 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.964182 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.964593 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.964895 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.974545 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.975099 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.979382 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.979648 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.980297 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.981023 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.981390 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: E1206 08:12:35.982039 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:36.482022049 +0000 UTC m=+39.057727137 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.982083 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.982458 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.982588 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.982718 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.982819 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.982832 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.982964 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.982998 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.983024 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.983071 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.983364 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.983610 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.983636 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.983834 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.984217 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.984800 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.985051 4763 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.986431 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hwfm\" (UniqueName: \"kubernetes.io/projected/2c40e17b-58b1-423e-9d70-c29ec900e983-kube-api-access-6hwfm\") pod \"apiserver-7bbb656c7d-6xcs6\" (UID: \"2c40e17b-58b1-423e-9d70-c29ec900e983\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.989445 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92zxz\" (UniqueName: \"kubernetes.io/projected/cd85f87f-ee5b-4f4f-be5f-34ffd3142319-kube-api-access-92zxz\") pod \"authentication-operator-69f744f599-w4ffb\" (UID: \"cd85f87f-ee5b-4f4f-be5f-34ffd3142319\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.990525 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.990845 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.990990 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 06 08:12:35 crc kubenswrapper[4763]: I1206 08:12:35.991436 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.002701 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.002935 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.003482 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004125 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004249 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004268 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004397 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004509 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004615 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004646 4763 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004647 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8q2qn\" (UniqueName: \"kubernetes.io/projected/66731e3f-7796-4ca9-a290-0b1f8ce568c6-kube-api-access-8q2qn\") pod \"controller-manager-879f6c89f-lfzzr\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004700 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004768 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004805 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004931 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004985 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.004939 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.005153 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.005162 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.005252 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.005270 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.005402 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.002708 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.005518 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.005616 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.005726 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.006057 4763 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.006776 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nlch\" (UniqueName: \"kubernetes.io/projected/bf456eda-cf8e-4084-a34e-2f8cdcac6f11-kube-api-access-2nlch\") pod \"apiserver-76f77b778f-l9dxz\" (UID: \"bf456eda-cf8e-4084-a34e-2f8cdcac6f11\") " pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.010827 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.011400 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.012342 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g4dd\" (UniqueName: \"kubernetes.io/projected/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-kube-api-access-8g4dd\") pod \"oauth-openshift-558db77b4-lrzmx\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.015062 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2n2zr\" (UniqueName: \"kubernetes.io/projected/c6450aca-625d-4980-b576-8e24a98b87d8-kube-api-access-2n2zr\") pod \"route-controller-manager-6576b87f9c-nvxmd\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.022727 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.042451 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.062295 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.067523 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.067655 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2408364-68f3-4d8d-9cce-22a25d841f6d-config\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.067690 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-registration-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.067714 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-etcd-service-ca\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.067730 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a286baf-47df-45d6-9ad3-25868bf62367-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-znd8z\" (UID: \"8a286baf-47df-45d6-9ad3-25868bf62367\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.067748 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q6kj\" (UniqueName: \"kubernetes.io/projected/1bcaf02d-6524-4656-8e17-46ad975fc850-kube-api-access-4q6kj\") pod \"service-ca-operator-777779d784-p68hx\" (UID: \"1bcaf02d-6524-4656-8e17-46ad975fc850\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.067778 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:36.567750685 +0000 UTC m=+39.143455743 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.067815 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-trusted-ca\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.067912 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95ql7\" (UniqueName: \"kubernetes.io/projected/976ea0cd-771c-4eb3-8163-87942bcf49f2-kube-api-access-95ql7\") pod \"kube-storage-version-migrator-operator-b67b599dd-q629c\" (UID: \"976ea0cd-771c-4eb3-8163-87942bcf49f2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.067948 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-etcd-client\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.068400 4763 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2408364-68f3-4d8d-9cce-22a25d841f6d-config\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.067975 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/50828027-891a-487f-acf3-cc7eba748959-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-xl75m\" (UID: \"50828027-891a-487f-acf3-cc7eba748959\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.068722 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gxwtv\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.068747 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gxwtv\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.068776 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8be954b6-c6f3-4932-992d-736df8a687ce-stats-auth\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.068809 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-etcd-service-ca\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.068841 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/976ea0cd-771c-4eb3-8163-87942bcf49f2-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-q629c\" (UID: \"976ea0cd-771c-4eb3-8163-87942bcf49f2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.068869 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-bound-sa-token\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.068926 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8be954b6-c6f3-4932-992d-736df8a687ce-default-certificate\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.068948 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d2408364-68f3-4d8d-9cce-22a25d841f6d-auth-proxy-config\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.068965 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6e007d3e-e776-4374-ba48-232d19ff421f-tmpfs\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: \"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069011 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6m64\" (UniqueName: \"kubernetes.io/projected/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-kube-api-access-c6m64\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069042 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4323dce1-5bfd-48a6-ba50-85a35f5a53f7-config-volume\") pod \"dns-default-8m2kx\" (UID: \"4323dce1-5bfd-48a6-ba50-85a35f5a53f7\") " pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069070 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a286baf-47df-45d6-9ad3-25868bf62367-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-znd8z\" (UID: \"8a286baf-47df-45d6-9ad3-25868bf62367\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069095 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-config\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069118 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-proxy-tls\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069136 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98wkq\" (UniqueName: 
\"kubernetes.io/projected/8be954b6-c6f3-4932-992d-736df8a687ce-kube-api-access-98wkq\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069139 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-trusted-ca\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069161 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrld8\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-kube-api-access-hrld8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069197 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d9bc36e-6135-431d-9eac-eae00ee40a18-config\") pod \"kube-apiserver-operator-766d6c64bb-w8f9q\" (UID: \"2d9bc36e-6135-431d-9eac-eae00ee40a18\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069227 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6759c28b-a57c-4263-980a-0a8476e579dc-proxy-tls\") pod \"machine-config-controller-84d6567774-9snzx\" (UID: \"6759c28b-a57c-4263-980a-0a8476e579dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069249 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-plugins-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069280 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e007d3e-e776-4374-ba48-232d19ff421f-apiservice-cert\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: \"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069362 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7d3f1284-20c9-4aa5-9c45-3cc96943980c-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069382 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-mountpoint-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " 
pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069431 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zd5v\" (UniqueName: \"kubernetes.io/projected/b0833380-d571-4a7e-8330-b6b88b7ffc3c-kube-api-access-2zd5v\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069465 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f46918db-01bb-47d9-9290-cbda03d34cfc-trusted-ca\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069482 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b77683f-2dfe-43e0-a5bf-ce618a203c50-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069501 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069525 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-socket-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069561 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/aee81022-2ddd-4213-9010-38b1141fc399-srv-cert\") pod \"olm-operator-6b444d44fb-prdgw\" (UID: \"aee81022-2ddd-4213-9010-38b1141fc399\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069605 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jn9h\" (UniqueName: \"kubernetes.io/projected/e35ce166-8cb5-4419-b4db-09f13a65daf2-kube-api-access-7jn9h\") pod \"control-plane-machine-set-operator-78cbb6b69f-7d7bl\" (UID: \"e35ce166-8cb5-4419-b4db-09f13a65daf2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069633 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcp88\" (UniqueName: \"kubernetes.io/projected/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-kube-api-access-jcp88\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069680 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-config\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069716 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8be954b6-c6f3-4932-992d-736df8a687ce-metrics-certs\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069734 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1e42298d-84d9-4aca-9893-394efa00acec-certs\") pod \"machine-config-server-5wtjb\" (UID: \"1e42298d-84d9-4aca-9893-394efa00acec\") " pod="openshift-machine-config-operator/machine-config-server-5wtjb" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069763 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d9bc36e-6135-431d-9eac-eae00ee40a18-config\") pod \"kube-apiserver-operator-766d6c64bb-w8f9q\" (UID: \"2d9bc36e-6135-431d-9eac-eae00ee40a18\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069880 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2d9bc36e-6135-431d-9eac-eae00ee40a18-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-w8f9q\" (UID: \"2d9bc36e-6135-431d-9eac-eae00ee40a18\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.069951 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/75d01498-9991-4f99-83ac-c24fdee94ebe-cert\") pod \"ingress-canary-2df96\" (UID: \"75d01498-9991-4f99-83ac-c24fdee94ebe\") " pod="openshift-ingress-canary/ingress-canary-2df96" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070022 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/041c7ad7-43e5-4dfc-898f-845db4f5fa2e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-w55pc\" (UID: \"041c7ad7-43e5-4dfc-898f-845db4f5fa2e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070051 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5de53d3f-9ac0-4b3b-aa31-015ccdb83fda-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfrgp\" (UID: \"5de53d3f-9ac0-4b3b-aa31-015ccdb83fda\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070128 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5de53d3f-9ac0-4b3b-aa31-015ccdb83fda-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfrgp\" (UID: \"5de53d3f-9ac0-4b3b-aa31-015ccdb83fda\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070158 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sptsz\" (UniqueName: \"kubernetes.io/projected/002fe870-57dd-4d32-ad54-a093ca95c088-kube-api-access-sptsz\") pod \"dns-operator-744455d44c-8qvr9\" (UID: \"002fe870-57dd-4d32-ad54-a093ca95c088\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070222 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-config\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070253 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d2408364-68f3-4d8d-9cce-22a25d841f6d-auth-proxy-config\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070264 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/71f89833-600c-4231-bc08-2a784591e6d8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-58vnh\" (UID: \"71f89833-600c-4231-bc08-2a784591e6d8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070286 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bcaf02d-6524-4656-8e17-46ad975fc850-config\") pod \"service-ca-operator-777779d784-p68hx\" (UID: \"1bcaf02d-6524-4656-8e17-46ad975fc850\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070320 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-service-ca\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070349 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d5457ee2-7607-40da-8cc5-b053a899760a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rppzh\" (UID: \"d5457ee2-7607-40da-8cc5-b053a899760a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070383 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ready\" (UniqueName: 
\"kubernetes.io/empty-dir/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-ready\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070446 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070465 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-etcd-ca\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070485 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz9fj\" (UniqueName: \"kubernetes.io/projected/8a286baf-47df-45d6-9ad3-25868bf62367-kube-api-access-sz9fj\") pod \"openshift-controller-manager-operator-756b6f6bc6-znd8z\" (UID: \"8a286baf-47df-45d6-9ad3-25868bf62367\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070545 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6759c28b-a57c-4263-980a-0a8476e579dc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9snzx\" (UID: \"6759c28b-a57c-4263-980a-0a8476e579dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070612 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070710 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0833380-d571-4a7e-8330-b6b88b7ffc3c-serving-cert\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070753 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/976ea0cd-771c-4eb3-8163-87942bcf49f2-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-q629c\" (UID: \"976ea0cd-771c-4eb3-8163-87942bcf49f2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070780 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-trusted-ca-bundle\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070801 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7d3f1284-20c9-4aa5-9c45-3cc96943980c-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.070820 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-serving-cert\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.070838 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:36.570825198 +0000 UTC m=+39.146530296 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071042 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-etcd-ca\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071106 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2d9bc36e-6135-431d-9eac-eae00ee40a18-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-w8f9q\" (UID: \"2d9bc36e-6135-431d-9eac-eae00ee40a18\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071133 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e007d3e-e776-4374-ba48-232d19ff421f-webhook-cert\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: \"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071156 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7d3f1284-20c9-4aa5-9c45-3cc96943980c-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc 
kubenswrapper[4763]: I1206 08:12:36.071171 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-942hb\" (UniqueName: \"kubernetes.io/projected/6e007d3e-e776-4374-ba48-232d19ff421f-kube-api-access-942hb\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: \"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071198 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0833380-d571-4a7e-8330-b6b88b7ffc3c-config\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071383 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/041c7ad7-43e5-4dfc-898f-845db4f5fa2e-config\") pod \"kube-controller-manager-operator-78b949d7b-w55pc\" (UID: \"041c7ad7-43e5-4dfc-898f-845db4f5fa2e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071404 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5de53d3f-9ac0-4b3b-aa31-015ccdb83fda-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfrgp\" (UID: \"5de53d3f-9ac0-4b3b-aa31-015ccdb83fda\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071470 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bcaf02d-6524-4656-8e17-46ad975fc850-serving-cert\") pod \"service-ca-operator-777779d784-p68hx\" (UID: \"1bcaf02d-6524-4656-8e17-46ad975fc850\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071506 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f46918db-01bb-47d9-9290-cbda03d34cfc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071529 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0b77683f-2dfe-43e0-a5bf-ce618a203c50-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071568 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b77683f-2dfe-43e0-a5bf-ce618a203c50-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071602 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqdgv\" (UniqueName: \"kubernetes.io/projected/aee81022-2ddd-4213-9010-38b1141fc399-kube-api-access-cqdgv\") pod \"olm-operator-6b444d44fb-prdgw\" (UID: \"aee81022-2ddd-4213-9010-38b1141fc399\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071626 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f1cc0acf-7876-428e-8430-a14d2498a435-secret-volume\") pod \"collect-profiles-29416800-7c57f\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071720 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-serving-cert\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071826 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d2408364-68f3-4d8d-9cce-22a25d841f6d-machine-approver-tls\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071914 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28bgj\" (UniqueName: \"kubernetes.io/projected/f46918db-01bb-47d9-9290-cbda03d34cfc-kube-api-access-28bgj\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.071982 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8be954b6-c6f3-4932-992d-736df8a687ce-service-ca-bundle\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072035 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtmss\" (UniqueName: \"kubernetes.io/projected/c726fd5f-7588-4b80-843b-b9f864be53ea-kube-api-access-gtmss\") pod \"marketplace-operator-79b997595-gxwtv\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072085 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/002fe870-57dd-4d32-ad54-a093ca95c088-metrics-tls\") pod \"dns-operator-744455d44c-8qvr9\" (UID: \"002fe870-57dd-4d32-ad54-a093ca95c088\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" Dec 06 08:12:36 crc 
kubenswrapper[4763]: I1206 08:12:36.072123 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqbhs\" (UniqueName: \"kubernetes.io/projected/0b77683f-2dfe-43e0-a5bf-ce618a203c50-kube-api-access-kqbhs\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072144 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpktr\" (UniqueName: \"kubernetes.io/projected/6759c28b-a57c-4263-980a-0a8476e579dc-kube-api-access-zpktr\") pod \"machine-config-controller-84d6567774-9snzx\" (UID: \"6759c28b-a57c-4263-980a-0a8476e579dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072174 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-tls\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072197 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-certificates\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072244 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/db212622-3d09-4dac-9144-e509e64a9b48-signing-cabundle\") pod \"service-ca-9c57cc56f-dbhnr\" (UID: \"db212622-3d09-4dac-9144-e509e64a9b48\") " pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072265 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ch8n\" (UniqueName: \"kubernetes.io/projected/db212622-3d09-4dac-9144-e509e64a9b48-kube-api-access-7ch8n\") pod \"service-ca-9c57cc56f-dbhnr\" (UID: \"db212622-3d09-4dac-9144-e509e64a9b48\") " pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072285 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztpgs\" (UniqueName: \"kubernetes.io/projected/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-kube-api-access-ztpgs\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072312 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlsbd\" (UniqueName: \"kubernetes.io/projected/d2408364-68f3-4d8d-9cce-22a25d841f6d-kube-api-access-dlsbd\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:36 crc 
kubenswrapper[4763]: I1206 08:12:36.072355 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4323dce1-5bfd-48a6-ba50-85a35f5a53f7-metrics-tls\") pod \"dns-default-8m2kx\" (UID: \"4323dce1-5bfd-48a6-ba50-85a35f5a53f7\") " pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072378 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rrs4\" (UniqueName: \"kubernetes.io/projected/75d01498-9991-4f99-83ac-c24fdee94ebe-kube-api-access-5rrs4\") pod \"ingress-canary-2df96\" (UID: \"75d01498-9991-4f99-83ac-c24fdee94ebe\") " pod="openshift-ingress-canary/ingress-canary-2df96" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072418 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1e42298d-84d9-4aca-9893-394efa00acec-node-bootstrap-token\") pod \"machine-config-server-5wtjb\" (UID: \"1e42298d-84d9-4aca-9893-394efa00acec\") " pod="openshift-machine-config-operator/machine-config-server-5wtjb" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072475 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krhxz\" (UniqueName: \"kubernetes.io/projected/69142441-e9e9-483d-b8ea-a6ad02792eab-kube-api-access-krhxz\") pod \"downloads-7954f5f757-4xfrk\" (UID: \"69142441-e9e9-483d-b8ea-a6ad02792eab\") " pod="openshift-console/downloads-7954f5f757-4xfrk" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072499 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv745\" (UniqueName: \"kubernetes.io/projected/71f89833-600c-4231-bc08-2a784591e6d8-kube-api-access-nv745\") pod \"cluster-samples-operator-665b6dd947-58vnh\" (UID: \"71f89833-600c-4231-bc08-2a784591e6d8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072527 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/db212622-3d09-4dac-9144-e509e64a9b48-signing-key\") pod \"service-ca-9c57cc56f-dbhnr\" (UID: \"db212622-3d09-4dac-9144-e509e64a9b48\") " pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072546 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b0833380-d571-4a7e-8330-b6b88b7ffc3c-trusted-ca\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072570 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdkx8\" (UniqueName: \"kubernetes.io/projected/4323dce1-5bfd-48a6-ba50-85a35f5a53f7-kube-api-access-jdkx8\") pod \"dns-default-8m2kx\" (UID: \"4323dce1-5bfd-48a6-ba50-85a35f5a53f7\") " pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072592 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/aee81022-2ddd-4213-9010-38b1141fc399-profile-collector-cert\") pod \"olm-operator-6b444d44fb-prdgw\" (UID: \"aee81022-2ddd-4213-9010-38b1141fc399\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072638 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f46918db-01bb-47d9-9290-cbda03d34cfc-metrics-tls\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072659 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d0faf76f-17ec-4dca-a7a3-8b1f96b3b133-profile-collector-cert\") pod \"catalog-operator-68c6474976-mpq2j\" (UID: \"d0faf76f-17ec-4dca-a7a3-8b1f96b3b133\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072696 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-oauth-serving-cert\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072948 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-images\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072969 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/041c7ad7-43e5-4dfc-898f-845db4f5fa2e-config\") pod \"kube-controller-manager-operator-78b949d7b-w55pc\" (UID: \"041c7ad7-43e5-4dfc-898f-845db4f5fa2e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.072987 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p477d\" (UniqueName: \"kubernetes.io/projected/28ac2e3e-807e-4d30-8775-de3438b3dee5-kube-api-access-p477d\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073044 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e35ce166-8cb5-4419-b4db-09f13a65daf2-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7d7bl\" (UID: \"e35ce166-8cb5-4419-b4db-09f13a65daf2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073087 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb9ct\" 
(UniqueName: \"kubernetes.io/projected/f1cc0acf-7876-428e-8430-a14d2498a435-kube-api-access-fb9ct\") pod \"collect-profiles-29416800-7c57f\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073111 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gm8kn\" (UniqueName: \"kubernetes.io/projected/6b9c2185-85d0-45db-bb41-44bbf526a10d-kube-api-access-gm8kn\") pod \"migrator-59844c95c7-6mtn5\" (UID: \"6b9c2185-85d0-45db-bb41-44bbf526a10d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073362 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2d9bc36e-6135-431d-9eac-eae00ee40a18-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-w8f9q\" (UID: \"2d9bc36e-6135-431d-9eac-eae00ee40a18\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073600 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zcm6\" (UniqueName: \"kubernetes.io/projected/1e42298d-84d9-4aca-9893-394efa00acec-kube-api-access-6zcm6\") pod \"machine-config-server-5wtjb\" (UID: \"1e42298d-84d9-4aca-9893-394efa00acec\") " pod="openshift-machine-config-operator/machine-config-server-5wtjb" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073755 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1cc0acf-7876-428e-8430-a14d2498a435-config-volume\") pod \"collect-profiles-29416800-7c57f\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073816 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wjfz\" (UniqueName: \"kubernetes.io/projected/50828027-891a-487f-acf3-cc7eba748959-kube-api-access-2wjfz\") pod \"package-server-manager-789f6589d5-xl75m\" (UID: \"50828027-891a-487f-acf3-cc7eba748959\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073842 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqgtv\" (UniqueName: \"kubernetes.io/projected/d0faf76f-17ec-4dca-a7a3-8b1f96b3b133-kube-api-access-tqgtv\") pod \"catalog-operator-68c6474976-mpq2j\" (UID: \"d0faf76f-17ec-4dca-a7a3-8b1f96b3b133\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073868 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-oauth-config\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073889 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/d0faf76f-17ec-4dca-a7a3-8b1f96b3b133-srv-cert\") pod \"catalog-operator-68c6474976-mpq2j\" (UID: \"d0faf76f-17ec-4dca-a7a3-8b1f96b3b133\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073945 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/041c7ad7-43e5-4dfc-898f-845db4f5fa2e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-w55pc\" (UID: \"041c7ad7-43e5-4dfc-898f-845db4f5fa2e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073976 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sqts\" (UniqueName: \"kubernetes.io/projected/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-kube-api-access-4sqts\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.073994 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr6jq\" (UniqueName: \"kubernetes.io/projected/d5457ee2-7607-40da-8cc5-b053a899760a-kube-api-access-sr6jq\") pod \"multus-admission-controller-857f4d67dd-rppzh\" (UID: \"d5457ee2-7607-40da-8cc5-b053a899760a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.074011 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-csi-data-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.074037 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.074727 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-certificates\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.075338 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7d3f1284-20c9-4aa5-9c45-3cc96943980c-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.075378 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-etcd-client\") pod \"etcd-operator-b45778765-nzw6k\" (UID: 
\"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.076203 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/002fe870-57dd-4d32-ad54-a093ca95c088-metrics-tls\") pod \"dns-operator-744455d44c-8qvr9\" (UID: \"002fe870-57dd-4d32-ad54-a093ca95c088\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.076227 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-serving-cert\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.076708 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/041c7ad7-43e5-4dfc-898f-845db4f5fa2e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-w55pc\" (UID: \"041c7ad7-43e5-4dfc-898f-845db4f5fa2e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.076717 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-tls\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.077271 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d2408364-68f3-4d8d-9cce-22a25d841f6d-machine-approver-tls\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.083708 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-sysctl-allowlist" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.103414 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.107624 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.123026 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.138701 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.142161 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.154340 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.160969 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.162631 4763 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.175309 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.175475 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4323dce1-5bfd-48a6-ba50-85a35f5a53f7-config-volume\") pod \"dns-default-8m2kx\" (UID: \"4323dce1-5bfd-48a6-ba50-85a35f5a53f7\") " pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.175508 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a286baf-47df-45d6-9ad3-25868bf62367-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-znd8z\" (UID: \"8a286baf-47df-45d6-9ad3-25868bf62367\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.175525 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-config\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.175542 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-proxy-tls\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.175571 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:36.675545236 +0000 UTC m=+39.251250274 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.176516 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4323dce1-5bfd-48a6-ba50-85a35f5a53f7-config-volume\") pod \"dns-default-8m2kx\" (UID: \"4323dce1-5bfd-48a6-ba50-85a35f5a53f7\") " pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.176577 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-config\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.176653 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a286baf-47df-45d6-9ad3-25868bf62367-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-znd8z\" (UID: \"8a286baf-47df-45d6-9ad3-25868bf62367\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.176755 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98wkq\" (UniqueName: \"kubernetes.io/projected/8be954b6-c6f3-4932-992d-736df8a687ce-kube-api-access-98wkq\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177025 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6759c28b-a57c-4263-980a-0a8476e579dc-proxy-tls\") pod \"machine-config-controller-84d6567774-9snzx\" (UID: \"6759c28b-a57c-4263-980a-0a8476e579dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177337 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-plugins-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177426 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-plugins-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177471 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e007d3e-e776-4374-ba48-232d19ff421f-apiservice-cert\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: 
\"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177523 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-mountpoint-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177568 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zd5v\" (UniqueName: \"kubernetes.io/projected/b0833380-d571-4a7e-8330-b6b88b7ffc3c-kube-api-access-2zd5v\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177597 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f46918db-01bb-47d9-9290-cbda03d34cfc-trusted-ca\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177615 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-mountpoint-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177623 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b77683f-2dfe-43e0-a5bf-ce618a203c50-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177648 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177702 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-socket-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177740 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/aee81022-2ddd-4213-9010-38b1141fc399-srv-cert\") pod \"olm-operator-6b444d44fb-prdgw\" (UID: \"aee81022-2ddd-4213-9010-38b1141fc399\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177766 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-7jn9h\" (UniqueName: \"kubernetes.io/projected/e35ce166-8cb5-4419-b4db-09f13a65daf2-kube-api-access-7jn9h\") pod \"control-plane-machine-set-operator-78cbb6b69f-7d7bl\" (UID: \"e35ce166-8cb5-4419-b4db-09f13a65daf2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177803 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8be954b6-c6f3-4932-992d-736df8a687ce-metrics-certs\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177825 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1e42298d-84d9-4aca-9893-394efa00acec-certs\") pod \"machine-config-server-5wtjb\" (UID: \"1e42298d-84d9-4aca-9893-394efa00acec\") " pod="openshift-machine-config-operator/machine-config-server-5wtjb" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177855 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/75d01498-9991-4f99-83ac-c24fdee94ebe-cert\") pod \"ingress-canary-2df96\" (UID: \"75d01498-9991-4f99-83ac-c24fdee94ebe\") " pod="openshift-ingress-canary/ingress-canary-2df96" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177885 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5de53d3f-9ac0-4b3b-aa31-015ccdb83fda-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfrgp\" (UID: \"5de53d3f-9ac0-4b3b-aa31-015ccdb83fda\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177921 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5de53d3f-9ac0-4b3b-aa31-015ccdb83fda-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfrgp\" (UID: \"5de53d3f-9ac0-4b3b-aa31-015ccdb83fda\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177956 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/71f89833-600c-4231-bc08-2a784591e6d8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-58vnh\" (UID: \"71f89833-600c-4231-bc08-2a784591e6d8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.177980 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bcaf02d-6524-4656-8e17-46ad975fc850-config\") pod \"service-ca-operator-777779d784-p68hx\" (UID: \"1bcaf02d-6524-4656-8e17-46ad975fc850\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.178006 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-service-ca\") pod \"console-f9d7485db-7j4g2\" (UID: 
\"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.178033 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d5457ee2-7607-40da-8cc5-b053a899760a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rppzh\" (UID: \"d5457ee2-7607-40da-8cc5-b053a899760a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.178057 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-ready\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.178096 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.178124 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz9fj\" (UniqueName: \"kubernetes.io/projected/8a286baf-47df-45d6-9ad3-25868bf62367-kube-api-access-sz9fj\") pod \"openshift-controller-manager-operator-756b6f6bc6-znd8z\" (UID: \"8a286baf-47df-45d6-9ad3-25868bf62367\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.178151 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6759c28b-a57c-4263-980a-0a8476e579dc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9snzx\" (UID: \"6759c28b-a57c-4263-980a-0a8476e579dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.178492 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.178568 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-socket-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.178606 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-ready\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.178911 4763 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:36.678879206 +0000 UTC m=+39.254584304 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.178955 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-service-ca\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.178999 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f46918db-01bb-47d9-9290-cbda03d34cfc-trusted-ca\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.179017 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.179047 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0833380-d571-4a7e-8330-b6b88b7ffc3c-serving-cert\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.179077 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/976ea0cd-771c-4eb3-8163-87942bcf49f2-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-q629c\" (UID: \"976ea0cd-771c-4eb3-8163-87942bcf49f2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.179101 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-trusted-ca-bundle\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.179193 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-serving-cert\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " 
pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.179585 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bcaf02d-6524-4656-8e17-46ad975fc850-config\") pod \"service-ca-operator-777779d784-p68hx\" (UID: \"1bcaf02d-6524-4656-8e17-46ad975fc850\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.179828 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5de53d3f-9ac0-4b3b-aa31-015ccdb83fda-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfrgp\" (UID: \"5de53d3f-9ac0-4b3b-aa31-015ccdb83fda\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.180775 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e007d3e-e776-4374-ba48-232d19ff421f-apiservice-cert\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: \"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181024 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-trusted-ca-bundle\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181130 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.179222 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e007d3e-e776-4374-ba48-232d19ff421f-webhook-cert\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: \"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181186 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-942hb\" (UniqueName: \"kubernetes.io/projected/6e007d3e-e776-4374-ba48-232d19ff421f-kube-api-access-942hb\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: \"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181214 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0833380-d571-4a7e-8330-b6b88b7ffc3c-config\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181242 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5de53d3f-9ac0-4b3b-aa31-015ccdb83fda-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfrgp\" (UID: \"5de53d3f-9ac0-4b3b-aa31-015ccdb83fda\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181272 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bcaf02d-6524-4656-8e17-46ad975fc850-serving-cert\") pod \"service-ca-operator-777779d784-p68hx\" (UID: \"1bcaf02d-6524-4656-8e17-46ad975fc850\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181296 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f46918db-01bb-47d9-9290-cbda03d34cfc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181321 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/976ea0cd-771c-4eb3-8163-87942bcf49f2-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-q629c\" (UID: \"976ea0cd-771c-4eb3-8163-87942bcf49f2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181324 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0b77683f-2dfe-43e0-a5bf-ce618a203c50-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181377 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b77683f-2dfe-43e0-a5bf-ce618a203c50-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181404 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqdgv\" (UniqueName: \"kubernetes.io/projected/aee81022-2ddd-4213-9010-38b1141fc399-kube-api-access-cqdgv\") pod \"olm-operator-6b444d44fb-prdgw\" (UID: \"aee81022-2ddd-4213-9010-38b1141fc399\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181427 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6759c28b-a57c-4263-980a-0a8476e579dc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9snzx\" (UID: \"6759c28b-a57c-4263-980a-0a8476e579dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181792 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/71f89833-600c-4231-bc08-2a784591e6d8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-58vnh\" (UID: \"71f89833-600c-4231-bc08-2a784591e6d8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.181427 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f1cc0acf-7876-428e-8430-a14d2498a435-secret-volume\") pod \"collect-profiles-29416800-7c57f\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182043 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28bgj\" (UniqueName: \"kubernetes.io/projected/f46918db-01bb-47d9-9290-cbda03d34cfc-kube-api-access-28bgj\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182072 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8be954b6-c6f3-4932-992d-736df8a687ce-service-ca-bundle\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182101 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtmss\" (UniqueName: \"kubernetes.io/projected/c726fd5f-7588-4b80-843b-b9f864be53ea-kube-api-access-gtmss\") pod \"marketplace-operator-79b997595-gxwtv\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182127 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqbhs\" (UniqueName: \"kubernetes.io/projected/0b77683f-2dfe-43e0-a5bf-ce618a203c50-kube-api-access-kqbhs\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182151 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpktr\" (UniqueName: \"kubernetes.io/projected/6759c28b-a57c-4263-980a-0a8476e579dc-kube-api-access-zpktr\") pod \"machine-config-controller-84d6567774-9snzx\" (UID: \"6759c28b-a57c-4263-980a-0a8476e579dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182160 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-proxy-tls\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182178 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/db212622-3d09-4dac-9144-e509e64a9b48-signing-cabundle\") pod 
\"service-ca-9c57cc56f-dbhnr\" (UID: \"db212622-3d09-4dac-9144-e509e64a9b48\") " pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182202 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ch8n\" (UniqueName: \"kubernetes.io/projected/db212622-3d09-4dac-9144-e509e64a9b48-kube-api-access-7ch8n\") pod \"service-ca-9c57cc56f-dbhnr\" (UID: \"db212622-3d09-4dac-9144-e509e64a9b48\") " pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182261 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztpgs\" (UniqueName: \"kubernetes.io/projected/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-kube-api-access-ztpgs\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182302 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4323dce1-5bfd-48a6-ba50-85a35f5a53f7-metrics-tls\") pod \"dns-default-8m2kx\" (UID: \"4323dce1-5bfd-48a6-ba50-85a35f5a53f7\") " pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182328 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rrs4\" (UniqueName: \"kubernetes.io/projected/75d01498-9991-4f99-83ac-c24fdee94ebe-kube-api-access-5rrs4\") pod \"ingress-canary-2df96\" (UID: \"75d01498-9991-4f99-83ac-c24fdee94ebe\") " pod="openshift-ingress-canary/ingress-canary-2df96" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182390 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1e42298d-84d9-4aca-9893-394efa00acec-node-bootstrap-token\") pod \"machine-config-server-5wtjb\" (UID: \"1e42298d-84d9-4aca-9893-394efa00acec\") " pod="openshift-machine-config-operator/machine-config-server-5wtjb" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182425 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv745\" (UniqueName: \"kubernetes.io/projected/71f89833-600c-4231-bc08-2a784591e6d8-kube-api-access-nv745\") pod \"cluster-samples-operator-665b6dd947-58vnh\" (UID: \"71f89833-600c-4231-bc08-2a784591e6d8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182447 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/db212622-3d09-4dac-9144-e509e64a9b48-signing-key\") pod \"service-ca-9c57cc56f-dbhnr\" (UID: \"db212622-3d09-4dac-9144-e509e64a9b48\") " pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182489 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b0833380-d571-4a7e-8330-b6b88b7ffc3c-trusted-ca\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182487 4763 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b77683f-2dfe-43e0-a5bf-ce618a203c50-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182514 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdkx8\" (UniqueName: \"kubernetes.io/projected/4323dce1-5bfd-48a6-ba50-85a35f5a53f7-kube-api-access-jdkx8\") pod \"dns-default-8m2kx\" (UID: \"4323dce1-5bfd-48a6-ba50-85a35f5a53f7\") " pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182535 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/aee81022-2ddd-4213-9010-38b1141fc399-profile-collector-cert\") pod \"olm-operator-6b444d44fb-prdgw\" (UID: \"aee81022-2ddd-4213-9010-38b1141fc399\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182590 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f46918db-01bb-47d9-9290-cbda03d34cfc-metrics-tls\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182613 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d0faf76f-17ec-4dca-a7a3-8b1f96b3b133-profile-collector-cert\") pod \"catalog-operator-68c6474976-mpq2j\" (UID: \"d0faf76f-17ec-4dca-a7a3-8b1f96b3b133\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182643 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-oauth-serving-cert\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182665 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-images\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182688 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p477d\" (UniqueName: \"kubernetes.io/projected/28ac2e3e-807e-4d30-8775-de3438b3dee5-kube-api-access-p477d\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182725 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e35ce166-8cb5-4419-b4db-09f13a65daf2-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7d7bl\" (UID: 
\"e35ce166-8cb5-4419-b4db-09f13a65daf2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182788 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb9ct\" (UniqueName: \"kubernetes.io/projected/f1cc0acf-7876-428e-8430-a14d2498a435-kube-api-access-fb9ct\") pod \"collect-profiles-29416800-7c57f\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182789 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/75d01498-9991-4f99-83ac-c24fdee94ebe-cert\") pod \"ingress-canary-2df96\" (UID: \"75d01498-9991-4f99-83ac-c24fdee94ebe\") " pod="openshift-ingress-canary/ingress-canary-2df96" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182810 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gm8kn\" (UniqueName: \"kubernetes.io/projected/6b9c2185-85d0-45db-bb41-44bbf526a10d-kube-api-access-gm8kn\") pod \"migrator-59844c95c7-6mtn5\" (UID: \"6b9c2185-85d0-45db-bb41-44bbf526a10d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182856 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zcm6\" (UniqueName: \"kubernetes.io/projected/1e42298d-84d9-4aca-9893-394efa00acec-kube-api-access-6zcm6\") pod \"machine-config-server-5wtjb\" (UID: \"1e42298d-84d9-4aca-9893-394efa00acec\") " pod="openshift-machine-config-operator/machine-config-server-5wtjb" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182917 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8be954b6-c6f3-4932-992d-736df8a687ce-service-ca-bundle\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182922 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1cc0acf-7876-428e-8430-a14d2498a435-config-volume\") pod \"collect-profiles-29416800-7c57f\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182966 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wjfz\" (UniqueName: \"kubernetes.io/projected/50828027-891a-487f-acf3-cc7eba748959-kube-api-access-2wjfz\") pod \"package-server-manager-789f6589d5-xl75m\" (UID: \"50828027-891a-487f-acf3-cc7eba748959\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.182985 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqgtv\" (UniqueName: \"kubernetes.io/projected/d0faf76f-17ec-4dca-a7a3-8b1f96b3b133-kube-api-access-tqgtv\") pod \"catalog-operator-68c6474976-mpq2j\" (UID: \"d0faf76f-17ec-4dca-a7a3-8b1f96b3b133\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183002 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-oauth-config\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183019 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d0faf76f-17ec-4dca-a7a3-8b1f96b3b133-srv-cert\") pod \"catalog-operator-68c6474976-mpq2j\" (UID: \"d0faf76f-17ec-4dca-a7a3-8b1f96b3b133\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183038 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sqts\" (UniqueName: \"kubernetes.io/projected/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-kube-api-access-4sqts\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183247 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr6jq\" (UniqueName: \"kubernetes.io/projected/d5457ee2-7607-40da-8cc5-b053a899760a-kube-api-access-sr6jq\") pod \"multus-admission-controller-857f4d67dd-rppzh\" (UID: \"d5457ee2-7607-40da-8cc5-b053a899760a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183264 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-csi-data-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183283 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183301 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-registration-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183323 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a286baf-47df-45d6-9ad3-25868bf62367-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-znd8z\" (UID: \"8a286baf-47df-45d6-9ad3-25868bf62367\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183339 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q6kj\" (UniqueName: \"kubernetes.io/projected/1bcaf02d-6524-4656-8e17-46ad975fc850-kube-api-access-4q6kj\") pod \"service-ca-operator-777779d784-p68hx\" 
(UID: \"1bcaf02d-6524-4656-8e17-46ad975fc850\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183359 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95ql7\" (UniqueName: \"kubernetes.io/projected/976ea0cd-771c-4eb3-8163-87942bcf49f2-kube-api-access-95ql7\") pod \"kube-storage-version-migrator-operator-b67b599dd-q629c\" (UID: \"976ea0cd-771c-4eb3-8163-87942bcf49f2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183376 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/50828027-891a-487f-acf3-cc7eba748959-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-xl75m\" (UID: \"50828027-891a-487f-acf3-cc7eba748959\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183394 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gxwtv\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183408 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gxwtv\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183426 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8be954b6-c6f3-4932-992d-736df8a687ce-stats-auth\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183453 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/976ea0cd-771c-4eb3-8163-87942bcf49f2-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-q629c\" (UID: \"976ea0cd-771c-4eb3-8163-87942bcf49f2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183483 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8be954b6-c6f3-4932-992d-736df8a687ce-default-certificate\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183500 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6e007d3e-e776-4374-ba48-232d19ff421f-tmpfs\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: 
\"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.183516 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6m64\" (UniqueName: \"kubernetes.io/projected/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-kube-api-access-c6m64\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.185740 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/aee81022-2ddd-4213-9010-38b1141fc399-srv-cert\") pod \"olm-operator-6b444d44fb-prdgw\" (UID: \"aee81022-2ddd-4213-9010-38b1141fc399\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.185759 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f1cc0acf-7876-428e-8430-a14d2498a435-secret-volume\") pod \"collect-profiles-29416800-7c57f\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.185848 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0833380-d571-4a7e-8330-b6b88b7ffc3c-config\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.186040 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1cc0acf-7876-428e-8430-a14d2498a435-config-volume\") pod \"collect-profiles-29416800-7c57f\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.186435 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6759c28b-a57c-4263-980a-0a8476e579dc-proxy-tls\") pod \"machine-config-controller-84d6567774-9snzx\" (UID: \"6759c28b-a57c-4263-980a-0a8476e579dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.186792 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1e42298d-84d9-4aca-9893-394efa00acec-certs\") pod \"machine-config-server-5wtjb\" (UID: \"1e42298d-84d9-4aca-9893-394efa00acec\") " pod="openshift-machine-config-operator/machine-config-server-5wtjb" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.187069 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/db212622-3d09-4dac-9144-e509e64a9b48-signing-cabundle\") pod \"service-ca-9c57cc56f-dbhnr\" (UID: \"db212622-3d09-4dac-9144-e509e64a9b48\") " pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.187433 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.188378 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d5457ee2-7607-40da-8cc5-b053a899760a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-rppzh\" (UID: \"d5457ee2-7607-40da-8cc5-b053a899760a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.188442 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0833380-d571-4a7e-8330-b6b88b7ffc3c-serving-cert\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.188714 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4323dce1-5bfd-48a6-ba50-85a35f5a53f7-metrics-tls\") pod \"dns-default-8m2kx\" (UID: \"4323dce1-5bfd-48a6-ba50-85a35f5a53f7\") " pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.188765 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1e42298d-84d9-4aca-9893-394efa00acec-node-bootstrap-token\") pod \"machine-config-server-5wtjb\" (UID: \"1e42298d-84d9-4aca-9893-394efa00acec\") " pod="openshift-machine-config-operator/machine-config-server-5wtjb" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.189182 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-registration-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.189477 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b0833380-d571-4a7e-8330-b6b88b7ffc3c-trusted-ca\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.190037 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-images\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.190335 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-oauth-serving-cert\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.190399 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" 
(UniqueName: \"kubernetes.io/secret/d0faf76f-17ec-4dca-a7a3-8b1f96b3b133-srv-cert\") pod \"catalog-operator-68c6474976-mpq2j\" (UID: \"d0faf76f-17ec-4dca-a7a3-8b1f96b3b133\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.190507 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/28ac2e3e-807e-4d30-8775-de3438b3dee5-csi-data-dir\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.190847 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-oauth-config\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.192135 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bcaf02d-6524-4656-8e17-46ad975fc850-serving-cert\") pod \"service-ca-operator-777779d784-p68hx\" (UID: \"1bcaf02d-6524-4656-8e17-46ad975fc850\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.192377 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/50828027-891a-487f-acf3-cc7eba748959-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-xl75m\" (UID: \"50828027-891a-487f-acf3-cc7eba748959\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.194463 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8be954b6-c6f3-4932-992d-736df8a687ce-default-certificate\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.194762 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-serving-cert\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.194869 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/aee81022-2ddd-4213-9010-38b1141fc399-profile-collector-cert\") pod \"olm-operator-6b444d44fb-prdgw\" (UID: \"aee81022-2ddd-4213-9010-38b1141fc399\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.196418 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0b77683f-2dfe-43e0-a5bf-ce618a203c50-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.196464 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e35ce166-8cb5-4419-b4db-09f13a65daf2-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7d7bl\" (UID: \"e35ce166-8cb5-4419-b4db-09f13a65daf2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.196796 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a286baf-47df-45d6-9ad3-25868bf62367-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-znd8z\" (UID: \"8a286baf-47df-45d6-9ad3-25868bf62367\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.196887 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.197331 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8be954b6-c6f3-4932-992d-736df8a687ce-stats-auth\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.197521 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gxwtv\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.198114 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gxwtv\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.198406 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8be954b6-c6f3-4932-992d-736df8a687ce-metrics-certs\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.198861 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f46918db-01bb-47d9-9290-cbda03d34cfc-metrics-tls\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.199011 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/db212622-3d09-4dac-9144-e509e64a9b48-signing-key\") pod \"service-ca-9c57cc56f-dbhnr\" (UID: \"db212622-3d09-4dac-9144-e509e64a9b48\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.200349 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/976ea0cd-771c-4eb3-8163-87942bcf49f2-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-q629c\" (UID: \"976ea0cd-771c-4eb3-8163-87942bcf49f2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.200770 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d0faf76f-17ec-4dca-a7a3-8b1f96b3b133-profile-collector-cert\") pod \"catalog-operator-68c6474976-mpq2j\" (UID: \"d0faf76f-17ec-4dca-a7a3-8b1f96b3b133\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.202464 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.213555 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.222391 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6e007d3e-e776-4374-ba48-232d19ff421f-tmpfs\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: \"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.222688 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5de53d3f-9ac0-4b3b-aa31-015ccdb83fda-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfrgp\" (UID: \"5de53d3f-9ac0-4b3b-aa31-015ccdb83fda\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.222800 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e007d3e-e776-4374-ba48-232d19ff421f-webhook-cert\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: \"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.228478 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.240492 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrf7f\" (UniqueName: \"kubernetes.io/projected/75bdca6d-084a-4898-bf07-5371cf477720-kube-api-access-qrf7f\") pod \"openshift-config-operator-7777fb866f-4gtpp\" (UID: \"75bdca6d-084a-4898-bf07-5371cf477720\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.258275 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmpjf\" (UniqueName: \"kubernetes.io/projected/29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0-kube-api-access-xmpjf\") pod \"openshift-apiserver-operator-796bbdcf4f-qvrcf\" (UID: \"29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.283002 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.284985 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g85cl\" (UniqueName: \"kubernetes.io/projected/0d8b8c08-d283-4ca2-aed3-2fcb7637ac91-kube-api-access-g85cl\") pod \"machine-api-operator-5694c8668f-7s62v\" (UID: \"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.285385 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.286044 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:36.786025059 +0000 UTC m=+39.361730097 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.302285 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.305951 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.322610 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.342583 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.363750 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.381192 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.382700 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.386864 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.387205 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:36.887188602 +0000 UTC m=+39.462893640 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.440331 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-bound-sa-token\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.454043 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l9dxz"] Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.457718 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrld8\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-kube-api-access-hrld8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: W1206 08:12:36.472331 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf456eda_cf8e_4084_a34e_2f8cdcac6f11.slice/crio-692c1e10de4efd87dbaccc908f3487d961d8bcddc1b7d0f5904eb7a0f8782094 WatchSource:0}: Error finding container 692c1e10de4efd87dbaccc908f3487d961d8bcddc1b7d0f5904eb7a0f8782094: Status 404 returned error can't find the container with id 692c1e10de4efd87dbaccc908f3487d961d8bcddc1b7d0f5904eb7a0f8782094 Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.479852 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcp88\" (UniqueName: \"kubernetes.io/projected/a49a405c-5eb4-4e9e-9371-d4e00c2e8e04-kube-api-access-jcp88\") pod \"etcd-operator-b45778765-nzw6k\" (UID: \"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.487427 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.488064 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:36.988036436 +0000 UTC m=+39.563741504 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.496359 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2d9bc36e-6135-431d-9eac-eae00ee40a18-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-w8f9q\" (UID: \"2d9bc36e-6135-431d-9eac-eae00ee40a18\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.515245 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/041c7ad7-43e5-4dfc-898f-845db4f5fa2e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-w55pc\" (UID: \"041c7ad7-43e5-4dfc-898f-845db4f5fa2e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.537652 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sptsz\" (UniqueName: \"kubernetes.io/projected/002fe870-57dd-4d32-ad54-a093ca95c088-kube-api-access-sptsz\") pod \"dns-operator-744455d44c-8qvr9\" (UID: \"002fe870-57dd-4d32-ad54-a093ca95c088\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.547865 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.560033 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krhxz\" (UniqueName: \"kubernetes.io/projected/69142441-e9e9-483d-b8ea-a6ad02792eab-kube-api-access-krhxz\") pod \"downloads-7954f5f757-4xfrk\" (UID: \"69142441-e9e9-483d-b8ea-a6ad02792eab\") " pod="openshift-console/downloads-7954f5f757-4xfrk" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.574725 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf"] Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.575567 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlsbd\" (UniqueName: \"kubernetes.io/projected/d2408364-68f3-4d8d-9cce-22a25d841f6d-kube-api-access-dlsbd\") pod \"machine-approver-56656f9798-q4sl9\" (UID: \"d2408364-68f3-4d8d-9cce-22a25d841f6d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.578451 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" Dec 06 08:12:36 crc kubenswrapper[4763]: W1206 08:12:36.580778 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29cfb2a4_1a77_4b0b_8165_37df0ae8d3c0.slice/crio-7a71ecff367de71942848668e4e6904de08106267c1e9c0c0ba4fa0de5db05fa WatchSource:0}: Error finding container 7a71ecff367de71942848668e4e6904de08106267c1e9c0c0ba4fa0de5db05fa: Status 404 returned error can't find the container with id 7a71ecff367de71942848668e4e6904de08106267c1e9c0c0ba4fa0de5db05fa Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.590354 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.590848 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.090829511 +0000 UTC m=+39.666534549 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.594709 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98wkq\" (UniqueName: \"kubernetes.io/projected/8be954b6-c6f3-4932-992d-736df8a687ce-kube-api-access-98wkq\") pod \"router-default-5444994796-mt26w\" (UID: \"8be954b6-c6f3-4932-992d-736df8a687ce\") " pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.616709 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zd5v\" (UniqueName: \"kubernetes.io/projected/b0833380-d571-4a7e-8330-b6b88b7ffc3c-kube-api-access-2zd5v\") pod \"console-operator-58897d9998-bf86d\" (UID: \"b0833380-d571-4a7e-8330-b6b88b7ffc3c\") " pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.624018 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-4xfrk" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.643443 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5de53d3f-9ac0-4b3b-aa31-015ccdb83fda-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sfrgp\" (UID: \"5de53d3f-9ac0-4b3b-aa31-015ccdb83fda\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.657177 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jn9h\" (UniqueName: \"kubernetes.io/projected/e35ce166-8cb5-4419-b4db-09f13a65daf2-kube-api-access-7jn9h\") pod \"control-plane-machine-set-operator-78cbb6b69f-7d7bl\" (UID: \"e35ce166-8cb5-4419-b4db-09f13a65daf2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.668134 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.686216 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b77683f-2dfe-43e0-a5bf-ce618a203c50-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.691358 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.691535 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.191511171 +0000 UTC m=+39.767216209 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.691587 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.691713 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-w4ffb"] Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.691884 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.1918772 +0000 UTC m=+39.767582238 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.692805 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lrzmx"] Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.693730 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.698678 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lfzzr"] Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.701488 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.702105 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqdgv\" (UniqueName: \"kubernetes.io/projected/aee81022-2ddd-4213-9010-38b1141fc399-kube-api-access-cqdgv\") pod \"olm-operator-6b444d44fb-prdgw\" (UID: \"aee81022-2ddd-4213-9010-38b1141fc399\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.721813 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz9fj\" (UniqueName: \"kubernetes.io/projected/8a286baf-47df-45d6-9ad3-25868bf62367-kube-api-access-sz9fj\") pod \"openshift-controller-manager-operator-756b6f6bc6-znd8z\" (UID: \"8a286baf-47df-45d6-9ad3-25868bf62367\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.723615 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.748400 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp"] Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.748812 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6"] Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.750016 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtmss\" (UniqueName: \"kubernetes.io/projected/c726fd5f-7588-4b80-843b-b9f864be53ea-kube-api-access-gtmss\") pod \"marketplace-operator-79b997595-gxwtv\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.751433 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd"] Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.763296 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q"] Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.769426 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28bgj\" (UniqueName: \"kubernetes.io/projected/f46918db-01bb-47d9-9290-cbda03d34cfc-kube-api-access-28bgj\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.776103 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.777081 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gm8kn\" (UniqueName: \"kubernetes.io/projected/6b9c2185-85d0-45db-bb41-44bbf526a10d-kube-api-access-gm8kn\") pod \"migrator-59844c95c7-6mtn5\" (UID: \"6b9c2185-85d0-45db-bb41-44bbf526a10d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.782068 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.793032 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.793499 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.293479815 +0000 UTC m=+39.869184853 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.798073 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rrs4\" (UniqueName: \"kubernetes.io/projected/75d01498-9991-4f99-83ac-c24fdee94ebe-kube-api-access-5rrs4\") pod \"ingress-canary-2df96\" (UID: \"75d01498-9991-4f99-83ac-c24fdee94ebe\") " pod="openshift-ingress-canary/ingress-canary-2df96" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.816638 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-942hb\" (UniqueName: \"kubernetes.io/projected/6e007d3e-e776-4374-ba48-232d19ff421f-kube-api-access-942hb\") pod \"packageserver-d55dfcdfc-qljhm\" (UID: \"6e007d3e-e776-4374-ba48-232d19ff421f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.818162 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7s62v"] Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.836183 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.840914 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wjfz\" (UniqueName: \"kubernetes.io/projected/50828027-891a-487f-acf3-cc7eba748959-kube-api-access-2wjfz\") pod \"package-server-manager-789f6589d5-xl75m\" (UID: \"50828027-891a-487f-acf3-cc7eba748959\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.846532 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.852234 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.857047 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.858128 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqgtv\" (UniqueName: \"kubernetes.io/projected/d0faf76f-17ec-4dca-a7a3-8b1f96b3b133-kube-api-access-tqgtv\") pod \"catalog-operator-68c6474976-mpq2j\" (UID: \"d0faf76f-17ec-4dca-a7a3-8b1f96b3b133\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.865480 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.880533 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqbhs\" (UniqueName: \"kubernetes.io/projected/0b77683f-2dfe-43e0-a5bf-ce618a203c50-kube-api-access-kqbhs\") pod \"cluster-image-registry-operator-dc59b4c8b-xrrcl\" (UID: \"0b77683f-2dfe-43e0-a5bf-ce618a203c50\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.883219 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-2df96" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.890372 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4xfrk"] Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.895505 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs\") pod \"network-metrics-daemon-p2rk6\" (UID: \"dcd65fd5-43dc-42a9-84d9-e37bb8e220af\") " pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.895556 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.895859 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.395843769 +0000 UTC m=+39.971548807 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.897647 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dcd65fd5-43dc-42a9-84d9-e37bb8e220af-metrics-certs\") pod \"network-metrics-daemon-p2rk6\" (UID: \"dcd65fd5-43dc-42a9-84d9-e37bb8e220af\") " pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.906534 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-p2rk6" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.914045 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ch8n\" (UniqueName: \"kubernetes.io/projected/db212622-3d09-4dac-9144-e509e64a9b48-kube-api-access-7ch8n\") pod \"service-ca-9c57cc56f-dbhnr\" (UID: \"db212622-3d09-4dac-9144-e509e64a9b48\") " pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.976596 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q6kj\" (UniqueName: \"kubernetes.io/projected/1bcaf02d-6524-4656-8e17-46ad975fc850-kube-api-access-4q6kj\") pod \"service-ca-operator-777779d784-p68hx\" (UID: \"1bcaf02d-6524-4656-8e17-46ad975fc850\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.985066 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc"] Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.986094 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" event={"ID":"2d9bc36e-6135-431d-9eac-eae00ee40a18","Type":"ContainerStarted","Data":"c259ecac11c10c405d4af9ee511e27620fe345033ad7defe24deda5b0f5aaf1f"} Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.986917 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" event={"ID":"75bdca6d-084a-4898-bf07-5371cf477720","Type":"ContainerStarted","Data":"cb0bc9e4ac0686c6f693c255a1b6ae348f6539483f330c08d4bc259d1d611c34"} Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.989279 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-mt26w" event={"ID":"8be954b6-c6f3-4932-992d-736df8a687ce","Type":"ContainerStarted","Data":"d02d74f7523e4ec373aebf5524c1467248aff31589b6c40bd2f264cd454e003d"} Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.990272 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" event={"ID":"2c40e17b-58b1-423e-9d70-c29ec900e983","Type":"ContainerStarted","Data":"0bc7d58708721a5da5ec962695a99461e58939cd15200e5c401bfa940a0dca9d"} Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.991069 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" event={"ID":"bf456eda-cf8e-4084-a34e-2f8cdcac6f11","Type":"ContainerStarted","Data":"692c1e10de4efd87dbaccc908f3487d961d8bcddc1b7d0f5904eb7a0f8782094"} Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.991831 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" event={"ID":"cd85f87f-ee5b-4f4f-be5f-34ffd3142319","Type":"ContainerStarted","Data":"3ac0555d0f51832c210cadb003c923a1799668ad6b0084dabadcb7badd857f35"} Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.992574 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" event={"ID":"c6450aca-625d-4980-b576-8e24a98b87d8","Type":"ContainerStarted","Data":"769df3d243aa3e0db12dd99267c522f00ecda8b5b7113b93c1d67d9686094d3a"} Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 
08:12:36.993436 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" event={"ID":"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce","Type":"ContainerStarted","Data":"1700f66293ec65fda97f2fef752796f4b4caf923d3a06a431ce0dfc5f1ae82c0"} Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.994160 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" event={"ID":"66731e3f-7796-4ca9-a290-0b1f8ce568c6","Type":"ContainerStarted","Data":"12649fa95d3a5d8b9e74cbe1b265636633c99ac79eb57317ed31f7352e3f86fd"} Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.995537 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" event={"ID":"29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0","Type":"ContainerStarted","Data":"7a71ecff367de71942848668e4e6904de08106267c1e9c0c0ba4fa0de5db05fa"} Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.996907 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.996971 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95ql7\" (UniqueName: \"kubernetes.io/projected/976ea0cd-771c-4eb3-8163-87942bcf49f2-kube-api-access-95ql7\") pod \"kube-storage-version-migrator-operator-b67b599dd-q629c\" (UID: \"976ea0cd-771c-4eb3-8163-87942bcf49f2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.997101 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.497076393 +0000 UTC m=+40.072781471 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:36 crc kubenswrapper[4763]: I1206 08:12:36.997625 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:36 crc kubenswrapper[4763]: E1206 08:12:36.997962 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.497945807 +0000 UTC m=+40.073650845 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.009451 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.013328 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f46918db-01bb-47d9-9290-cbda03d34cfc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-pcqzd\" (UID: \"f46918db-01bb-47d9-9290-cbda03d34cfc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.030397 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.035159 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr6jq\" (UniqueName: \"kubernetes.io/projected/d5457ee2-7607-40da-8cc5-b053a899760a-kube-api-access-sr6jq\") pod \"multus-admission-controller-857f4d67dd-rppzh\" (UID: \"d5457ee2-7607-40da-8cc5-b053a899760a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.049098 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.054940 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.060934 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sqts\" (UniqueName: \"kubernetes.io/projected/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-kube-api-access-4sqts\") pod \"console-f9d7485db-7j4g2\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.075977 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zcm6\" (UniqueName: \"kubernetes.io/projected/1e42298d-84d9-4aca-9893-394efa00acec-kube-api-access-6zcm6\") pod \"machine-config-server-5wtjb\" (UID: \"1e42298d-84d9-4aca-9893-394efa00acec\") " pod="openshift-machine-config-operator/machine-config-server-5wtjb" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.088595 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.094962 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.096826 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p477d\" (UniqueName: \"kubernetes.io/projected/28ac2e3e-807e-4d30-8775-de3438b3dee5-kube-api-access-p477d\") pod \"csi-hostpathplugin-b9lvw\" (UID: \"28ac2e3e-807e-4d30-8775-de3438b3dee5\") " pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.098704 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.098871 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.598853722 +0000 UTC m=+40.174558760 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.098950 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.099300 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.599291504 +0000 UTC m=+40.174996542 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.101732 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.114095 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.121541 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdkx8\" (UniqueName: \"kubernetes.io/projected/4323dce1-5bfd-48a6-ba50-85a35f5a53f7-kube-api-access-jdkx8\") pod \"dns-default-8m2kx\" (UID: \"4323dce1-5bfd-48a6-ba50-85a35f5a53f7\") " pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.121725 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.129437 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.140838 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb9ct\" (UniqueName: \"kubernetes.io/projected/f1cc0acf-7876-428e-8430-a14d2498a435-kube-api-access-fb9ct\") pod \"collect-profiles-29416800-7c57f\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.150919 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8qvr9"] Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.151869 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-nzw6k"] Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.154022 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztpgs\" (UniqueName: \"kubernetes.io/projected/6adfe482-7fa8-4992-bf3a-efae3f8eb8f8-kube-api-access-ztpgs\") pod \"machine-config-operator-74547568cd-pflsw\" (UID: \"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.154239 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpktr\" (UniqueName: \"kubernetes.io/projected/6759c28b-a57c-4263-980a-0a8476e579dc-kube-api-access-zpktr\") pod \"machine-config-controller-84d6567774-9snzx\" (UID: \"6759c28b-a57c-4263-980a-0a8476e579dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.157166 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6m64\" (UniqueName: \"kubernetes.io/projected/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-kube-api-access-c6m64\") pod \"cni-sysctl-allowlist-ds-ln8pp\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.157218 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv745\" (UniqueName: \"kubernetes.io/projected/71f89833-600c-4231-bc08-2a784591e6d8-kube-api-access-nv745\") pod \"cluster-samples-operator-665b6dd947-58vnh\" (UID: \"71f89833-600c-4231-bc08-2a784591e6d8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.176225 4763 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-5wtjb" Dec 06 08:12:37 crc kubenswrapper[4763]: W1206 08:12:37.181518 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d8b8c08_d283_4ca2_aed3_2fcb7637ac91.slice/crio-bef7eb091147e9e85e03bb7e7f59ebfcb053ced8cc6d95d42afed9815698e1f8 WatchSource:0}: Error finding container bef7eb091147e9e85e03bb7e7f59ebfcb053ced8cc6d95d42afed9815698e1f8: Status 404 returned error can't find the container with id bef7eb091147e9e85e03bb7e7f59ebfcb053ced8cc6d95d42afed9815698e1f8 Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.192706 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.199644 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.199786 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.200081 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.700057005 +0000 UTC m=+40.275762043 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.200319 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.200624 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.700614161 +0000 UTC m=+40.276319199 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.225247 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.300920 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.302040 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.802001799 +0000 UTC m=+40.377706837 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.316951 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.334221 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw"] Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.361755 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.369147 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.402745 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.403166 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:37.90315311 +0000 UTC m=+40.478858148 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.408480 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.435589 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.506842 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.507037 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.007011106 +0000 UTC m=+40.582716144 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.507291 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.507939 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.007890059 +0000 UTC m=+40.583595097 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.609130 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.609201 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.109184345 +0000 UTC m=+40.684889383 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.609736 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.610023 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.110014158 +0000 UTC m=+40.685719196 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.615229 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl"] Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.640986 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp"] Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.644041 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-p2rk6"] Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.710846 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.711333 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.211304623 +0000 UTC m=+40.787009661 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.745387 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl"] Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.786053 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd"] Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.812394 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.812728 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.312709112 +0000 UTC m=+40.888414150 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:37 crc kubenswrapper[4763]: W1206 08:12:37.860765 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5de53d3f_9ac0_4b3b_aa31_015ccdb83fda.slice/crio-9c9747a97de9c4195bb976a683519971312af1fbb5c85e7577bb00408ff1deb2 WatchSource:0}: Error finding container 9c9747a97de9c4195bb976a683519971312af1fbb5c85e7577bb00408ff1deb2: Status 404 returned error can't find the container with id 9c9747a97de9c4195bb976a683519971312af1fbb5c85e7577bb00408ff1deb2 Dec 06 08:12:37 crc kubenswrapper[4763]: W1206 08:12:37.884991 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b77683f_2dfe_43e0_a5bf_ce618a203c50.slice/crio-d0a271d1d8d3fbbf928b22db4259fc8cce54a03d0f03541410e27f06930258d8 WatchSource:0}: Error finding container d0a271d1d8d3fbbf928b22db4259fc8cce54a03d0f03541410e27f06930258d8: Status 404 returned error can't find the container with id d0a271d1d8d3fbbf928b22db4259fc8cce54a03d0f03541410e27f06930258d8 Dec 06 08:12:37 crc kubenswrapper[4763]: I1206 08:12:37.913145 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:37 crc kubenswrapper[4763]: E1206 08:12:37.913481 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.413467873 +0000 UTC m=+40.989172911 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.004518 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" event={"ID":"f46918db-01bb-47d9-9290-cbda03d34cfc","Type":"ContainerStarted","Data":"84c75db056d2fe43f529472b1a022208140accd8ca1870107a2aaee1dd24a6c8"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.008308 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" event={"ID":"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91","Type":"ContainerStarted","Data":"bef7eb091147e9e85e03bb7e7f59ebfcb053ced8cc6d95d42afed9815698e1f8"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.015161 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.015574 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.515562661 +0000 UTC m=+41.091267699 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.019513 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" event={"ID":"29cfb2a4-1a77-4b0b-8165-37df0ae8d3c0","Type":"ContainerStarted","Data":"dacccdfbc93c5e19e163a19a8c1fe9830af503c6d9de4d1c2c9d1d24519d23d3"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.028703 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" event={"ID":"cd85f87f-ee5b-4f4f-be5f-34ffd3142319","Type":"ContainerStarted","Data":"7bc445ee1e1ee5916d628cd92f7bf108378808777d05b480a183acef69eb697e"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.044110 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" event={"ID":"66731e3f-7796-4ca9-a290-0b1f8ce568c6","Type":"ContainerStarted","Data":"e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.044655 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.053733 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-p2rk6" event={"ID":"dcd65fd5-43dc-42a9-84d9-e37bb8e220af","Type":"ContainerStarted","Data":"55ea7daabc3cb8392b2920d2e58390bcad4b3734e687a24b8439c2ef9e3f8976"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.055846 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-mt26w" event={"ID":"8be954b6-c6f3-4932-992d-736df8a687ce","Type":"ContainerStarted","Data":"39a459cef944e3321414ac9e22dea0712c50b3738754cc1d27f9dd458607f7ce"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.075997 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.077424 4763 generic.go:334] "Generic (PLEG): container finished" podID="bf456eda-cf8e-4084-a34e-2f8cdcac6f11" containerID="e30965c51d8a9bd5800b6beb53c2f2deb89655f4a4c8feff022262f985d19a42" exitCode=0 Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.077482 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" event={"ID":"bf456eda-cf8e-4084-a34e-2f8cdcac6f11","Type":"ContainerDied","Data":"e30965c51d8a9bd5800b6beb53c2f2deb89655f4a4c8feff022262f985d19a42"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.103026 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl" event={"ID":"e35ce166-8cb5-4419-b4db-09f13a65daf2","Type":"ContainerStarted","Data":"ffe28ff56ba2f65956c5573348ab7e7963357b7f1c9491194a41d941db09931b"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.115957 4763 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.116218 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.616195909 +0000 UTC m=+41.191900947 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.116419 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.117570 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.617559366 +0000 UTC m=+41.193264404 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.126383 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" event={"ID":"c6450aca-625d-4980-b576-8e24a98b87d8","Type":"ContainerStarted","Data":"df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.127267 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.131299 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" event={"ID":"d2408364-68f3-4d8d-9cce-22a25d841f6d","Type":"ContainerStarted","Data":"0d3cd32fe3f3465ebf9444e56824be6f6186ba565bdb48d5cea254454f17263a"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.139984 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" event={"ID":"5de53d3f-9ac0-4b3b-aa31-015ccdb83fda","Type":"ContainerStarted","Data":"9c9747a97de9c4195bb976a683519971312af1fbb5c85e7577bb00408ff1deb2"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.143279 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-5wtjb" event={"ID":"1e42298d-84d9-4aca-9893-394efa00acec","Type":"ContainerStarted","Data":"d4614c6d12c1a5f7202e3c15b55724eb390e829ea76597e652922c0838181c84"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.145208 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" event={"ID":"041c7ad7-43e5-4dfc-898f-845db4f5fa2e","Type":"ContainerStarted","Data":"10f10d152c67770140c0de1eca3a1c8361dd825efe76d7228a4072e0ed4e1306"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.152753 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" event={"ID":"aee81022-2ddd-4213-9010-38b1141fc399","Type":"ContainerStarted","Data":"77a421ba53fb749746dc66b4be09fe6b831d540727c41af20d48d71cc33be00c"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.155359 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bdca6d-084a-4898-bf07-5371cf477720" containerID="d2c1bb3d758dd707dd8c09d20bde3b929f9e3321703499c6ac53434de6ac91f0" exitCode=0 Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.155464 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" event={"ID":"75bdca6d-084a-4898-bf07-5371cf477720","Type":"ContainerDied","Data":"d2c1bb3d758dd707dd8c09d20bde3b929f9e3321703499c6ac53434de6ac91f0"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.161422 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4xfrk" 
event={"ID":"69142441-e9e9-483d-b8ea-a6ad02792eab","Type":"ContainerStarted","Data":"bcaa62652f06b37e3220bb5a3af2f5b2ee7b040df1669ab6b96666ae372ac98f"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.164733 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" event={"ID":"002fe870-57dd-4d32-ad54-a093ca95c088","Type":"ContainerStarted","Data":"9192abf47d9b6e459fd8873ee7bb26e64898e012a4284d1f28fbbdc7d5ca1461"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.175820 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" event={"ID":"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce","Type":"ContainerStarted","Data":"a92efb1a1057138e4569df3e062be09f070a912ac3c306b48fde3bd5a60ec382"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.176322 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.179756 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" event={"ID":"0b77683f-2dfe-43e0-a5bf-ce618a203c50","Type":"ContainerStarted","Data":"d0a271d1d8d3fbbf928b22db4259fc8cce54a03d0f03541410e27f06930258d8"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.180072 4763 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-lrzmx container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" start-of-body= Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.180108 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.203758 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" event={"ID":"954f9d07-1e99-48fa-bd6d-b61d22ee7faa","Type":"ContainerStarted","Data":"dfbc8f617779ef31295d559d2a5a230f449e5c08e106a0e2123c9b893eb25a12"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.206707 4763 generic.go:334] "Generic (PLEG): container finished" podID="2c40e17b-58b1-423e-9d70-c29ec900e983" containerID="c509a8be444083461c12a2395a65736fe56c2f9c1f0d7da6552d0e3043cc69ab" exitCode=0 Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.206780 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" event={"ID":"2c40e17b-58b1-423e-9d70-c29ec900e983","Type":"ContainerDied","Data":"c509a8be444083461c12a2395a65736fe56c2f9c1f0d7da6552d0e3043cc69ab"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.212502 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" event={"ID":"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04","Type":"ContainerStarted","Data":"fa01fc78733586c4438048f391f40d558c87ef1b5af571e25e5f9ae0aa1c8e4a"} Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.219559 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.219700 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.719668833 +0000 UTC m=+41.295373871 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.220412 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.224103 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.724090973 +0000 UTC m=+41.299796011 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.322631 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.323956 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.823939809 +0000 UTC m=+41.399644847 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.423989 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.424720 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:38.924526106 +0000 UTC m=+41.500231144 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.527110 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.527261 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:39.0272327 +0000 UTC m=+41.602937738 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.527555 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.527825 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:39.027812916 +0000 UTC m=+41.603517954 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.640524 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.640799 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:39.140782106 +0000 UTC m=+41.716487144 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.724087 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.734729 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:38 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:38 crc kubenswrapper[4763]: [+]process-running ok Dec 06 08:12:38 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.734804 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.742974 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.743348 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:39.243334837 +0000 UTC m=+41.819039875 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.751244 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-rppzh"] Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.831729 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.845059 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.845401 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:39.345387222 +0000 UTC m=+41.921092260 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.949299 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:38 crc kubenswrapper[4763]: E1206 08:12:38.962562 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:39.462544467 +0000 UTC m=+42.038249505 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:38 crc kubenswrapper[4763]: I1206 08:12:38.969415 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.034486 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-w4ffb" podStartSLOduration=19.034467399 podStartE2EDuration="19.034467399s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.032084105 +0000 UTC m=+41.607789153" watchObservedRunningTime="2025-12-06 08:12:39.034467399 +0000 UTC m=+41.610172437" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.057309 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:39 crc kubenswrapper[4763]: E1206 08:12:39.057688 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:39.557673626 +0000 UTC m=+42.133378664 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.183568 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:39 crc kubenswrapper[4763]: E1206 08:12:39.183941 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:39.683928636 +0000 UTC m=+42.259633674 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.184513 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-mt26w" podStartSLOduration=18.184487231 podStartE2EDuration="18.184487231s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.122069755 +0000 UTC m=+41.697774823" watchObservedRunningTime="2025-12-06 08:12:39.184487231 +0000 UTC m=+41.760192269" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.202060 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-2df96"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.202650 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" podStartSLOduration=18.202631471 podStartE2EDuration="18.202631471s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.155705903 +0000 UTC m=+41.731410941" watchObservedRunningTime="2025-12-06 08:12:39.202631471 +0000 UTC m=+41.778336509" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.277424 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.284809 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:39 crc kubenswrapper[4763]: E1206 08:12:39.285284 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:39.785269133 +0000 UTC m=+42.360974171 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.290775 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j"] Dec 06 08:12:39 crc kubenswrapper[4763]: W1206 08:12:39.319320 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0faf76f_17ec_4dca_a7a3_8b1f96b3b133.slice/crio-cbc09a0d0c3bb60abef141a28fddb24bd9f364f102b81a80773953b60ac53fe7 WatchSource:0}: Error finding container cbc09a0d0c3bb60abef141a28fddb24bd9f364f102b81a80773953b60ac53fe7: Status 404 returned error can't find the container with id cbc09a0d0c3bb60abef141a28fddb24bd9f364f102b81a80773953b60ac53fe7 Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.320965 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" podStartSLOduration=19.320935196 podStartE2EDuration="19.320935196s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.319764014 +0000 UTC m=+41.895469052" watchObservedRunningTime="2025-12-06 08:12:39.320935196 +0000 UTC m=+41.896640244" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.338556 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gxwtv"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.341209 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qvrcf" podStartSLOduration=19.341194414 podStartE2EDuration="19.341194414s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.339110987 +0000 UTC m=+41.914816025" watchObservedRunningTime="2025-12-06 08:12:39.341194414 +0000 UTC m=+41.916899452" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.385906 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:39 crc kubenswrapper[4763]: E1206 08:12:39.386160 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:39.886149088 +0000 UTC m=+42.461854126 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.387480 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" event={"ID":"0b77683f-2dfe-43e0-a5bf-ce618a203c50","Type":"ContainerStarted","Data":"57f0a95867699229045c63caf98a874c21cdfa16b9ac5de41c7ed118d1024bfa"} Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.423458 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.446288 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-xrrcl" podStartSLOduration=18.446272592 podStartE2EDuration="18.446272592s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.445038418 +0000 UTC m=+42.020743456" watchObservedRunningTime="2025-12-06 08:12:39.446272592 +0000 UTC m=+42.021977630" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.446915 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" podStartSLOduration=18.446910378 podStartE2EDuration="18.446910378s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.400555947 +0000 UTC m=+41.976261005" watchObservedRunningTime="2025-12-06 08:12:39.446910378 +0000 UTC m=+42.022615416" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.447969 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4xfrk" event={"ID":"69142441-e9e9-483d-b8ea-a6ad02792eab","Type":"ContainerStarted","Data":"0ce764e7e575ca1afaac498ea919d8f9c778d12391ff538d66de062eef7506a9"} Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.448696 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-4xfrk" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.452128 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" event={"ID":"2d9bc36e-6135-431d-9eac-eae00ee40a18","Type":"ContainerStarted","Data":"d7f5a506f4b1ff928be065d2e4031f8064d55e060d7fdab58ba8fbdfa16d05ac"} Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.462913 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" event={"ID":"041c7ad7-43e5-4dfc-898f-845db4f5fa2e","Type":"ContainerStarted","Data":"24bb6f16e67f19026bc75840ee3659bbc6c6280009eedccda201dac250f2aa98"} Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.474169 4763 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" event={"ID":"6e007d3e-e776-4374-ba48-232d19ff421f","Type":"ContainerStarted","Data":"6f461b1f1188604b2190952877f292ef147904c9b20c752b462251046dc5dab6"} Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.479370 4763 patch_prober.go:28] interesting pod/downloads-7954f5f757-4xfrk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.479419 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4xfrk" podUID="69142441-e9e9-483d-b8ea-a6ad02792eab" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.482615 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-bf86d"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.497530 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:39 crc kubenswrapper[4763]: E1206 08:12:39.497800 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:39.997778432 +0000 UTC m=+42.573483470 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.503534 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" event={"ID":"d5457ee2-7607-40da-8cc5-b053a899760a","Type":"ContainerStarted","Data":"f476808394938c80abbd1222608de456b78e1ae0218e910230bd975ad59f5436"} Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.537959 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dbhnr"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.540826 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-b9lvw"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.542137 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" podStartSLOduration=19.54211981 podStartE2EDuration="19.54211981s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.517269578 +0000 UTC m=+42.092974616" watchObservedRunningTime="2025-12-06 08:12:39.54211981 +0000 UTC m=+42.117824848" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.550226 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-5wtjb" event={"ID":"1e42298d-84d9-4aca-9893-394efa00acec","Type":"ContainerStarted","Data":"7fe0c8e60719466040e5a7451ade678e2067ffc604eb476669b3b074fc0b3df1"} Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.556644 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-p68hx"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.568141 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.574279 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-7j4g2"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.580806 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w8f9q" podStartSLOduration=18.580779154 podStartE2EDuration="18.580779154s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.556536329 +0000 UTC m=+42.132241367" watchObservedRunningTime="2025-12-06 08:12:39.580779154 +0000 UTC m=+42.156484192" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.598907 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:39 crc kubenswrapper[4763]: E1206 08:12:39.600197 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:40.100185528 +0000 UTC m=+42.675890566 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.608665 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.610854 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-4xfrk" podStartSLOduration=18.610842076 podStartE2EDuration="18.610842076s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.595228654 +0000 UTC m=+42.170933692" watchObservedRunningTime="2025-12-06 08:12:39.610842076 +0000 UTC m=+42.186547114" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.612438 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-8m2kx"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.623857 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" event={"ID":"a49a405c-5eb4-4e9e-9371-d4e00c2e8e04","Type":"ContainerStarted","Data":"4336ccd19ba4710aeeb66c997a7081bffb1fef300cb0e157d6eab16d5abd6499"} Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.658431 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w55pc" podStartSLOduration=18.658408111 podStartE2EDuration="18.658408111s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.636111018 +0000 UTC m=+42.211816066" watchObservedRunningTime="2025-12-06 08:12:39.658408111 +0000 UTC m=+42.234113149" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.675261 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.702848 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.704149 4763 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" event={"ID":"954f9d07-1e99-48fa-bd6d-b61d22ee7faa","Type":"ContainerStarted","Data":"721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044"} Dec 06 08:12:39 crc kubenswrapper[4763]: E1206 08:12:39.704925 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:40.204884556 +0000 UTC m=+42.780589584 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.706722 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.754142 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:39 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:39 crc kubenswrapper[4763]: [+]process-running ok Dec 06 08:12:39 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.754217 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.754652 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-5wtjb" podStartSLOduration=6.75463408 podStartE2EDuration="6.75463408s" podCreationTimestamp="2025-12-06 08:12:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.694239639 +0000 UTC m=+42.269944677" watchObservedRunningTime="2025-12-06 08:12:39.75463408 +0000 UTC m=+42.330339118" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.782595 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-nzw6k" podStartSLOduration=18.782578384 podStartE2EDuration="18.782578384s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.77910304 +0000 UTC m=+42.354808078" watchObservedRunningTime="2025-12-06 08:12:39.782578384 +0000 UTC m=+42.358283422" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.802637 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.802735 4763 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.804201 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:39 crc kubenswrapper[4763]: E1206 08:12:39.805306 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:40.305291488 +0000 UTC m=+42.880996526 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.817514 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" event={"ID":"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91","Type":"ContainerStarted","Data":"3e7dec49dc2a7be7398cf4dbaa0fc159b59b3d8595d864fd1ffebeaf2f467867"} Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.829784 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" podStartSLOduration=6.829760878 podStartE2EDuration="6.829760878s" podCreationTimestamp="2025-12-06 08:12:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.817368214 +0000 UTC m=+42.393073252" watchObservedRunningTime="2025-12-06 08:12:39.829760878 +0000 UTC m=+42.405465916" Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.835073 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.867021 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw"] Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.918654 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:39 crc kubenswrapper[4763]: E1206 08:12:39.919052 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:40.419025549 +0000 UTC m=+42.994730587 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:39 crc kubenswrapper[4763]: I1206 08:12:39.920193 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:39 crc kubenswrapper[4763]: E1206 08:12:39.926937 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:40.426921593 +0000 UTC m=+43.002626621 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.021931 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:40 crc kubenswrapper[4763]: E1206 08:12:40.022337 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:40.522321969 +0000 UTC m=+43.098027007 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.123085 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:40 crc kubenswrapper[4763]: E1206 08:12:40.123363 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:40.623351098 +0000 UTC m=+43.199056126 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.181873 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.226538 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:40 crc kubenswrapper[4763]: E1206 08:12:40.227114 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:40.727095999 +0000 UTC m=+43.302801047 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.244857 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" podStartSLOduration=19.244842829 podStartE2EDuration="19.244842829s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:39.87314361 +0000 UTC m=+42.448848648" watchObservedRunningTime="2025-12-06 08:12:40.244842829 +0000 UTC m=+42.820547867" Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.333742 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:40 crc kubenswrapper[4763]: E1206 08:12:40.334096 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:40.834081139 +0000 UTC m=+43.409786177 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.434369 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:40 crc kubenswrapper[4763]: E1206 08:12:40.434954 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:40.934938343 +0000 UTC m=+43.510643381 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.541465 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:40 crc kubenswrapper[4763]: E1206 08:12:40.541795 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.041779859 +0000 UTC m=+43.617484897 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.642648 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:40 crc kubenswrapper[4763]: E1206 08:12:40.642825 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.142792377 +0000 UTC m=+43.718497415 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.643210 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:40 crc kubenswrapper[4763]: E1206 08:12:40.643543 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.143530487 +0000 UTC m=+43.719235525 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.735832 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:40 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:40 crc kubenswrapper[4763]: [+]process-running ok Dec 06 08:12:40 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.735884 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.746188 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:40 crc kubenswrapper[4763]: E1206 08:12:40.746590 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.246577529 +0000 UTC m=+43.822282567 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.874662 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:40 crc kubenswrapper[4763]: E1206 08:12:40.880862 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.380833096 +0000 UTC m=+43.956538134 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.885307 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5" event={"ID":"6b9c2185-85d0-45db-bb41-44bbf526a10d","Type":"ContainerStarted","Data":"9ffdf8bb2f74ba35eeacb4180a1dc975e764b9f63b545f73bfc3363876443137"} Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.933566 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" event={"ID":"2c40e17b-58b1-423e-9d70-c29ec900e983","Type":"ContainerStarted","Data":"36b7902a91857920b2a8b38a3062d8fbac034d85c2205ecd298b689c0d6ed609"} Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.942583 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" event={"ID":"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8","Type":"ContainerStarted","Data":"57917b924e3cb36a8bc58bdfaee41c1da77732fc63341d1f8a7774e28b3a305d"} Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.963198 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" event={"ID":"976ea0cd-771c-4eb3-8163-87942bcf49f2","Type":"ContainerStarted","Data":"c4c82988389cc183b7309b0f206d600843abfa7697c2b6a30c0732f1396039ba"} Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.976520 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:40 crc 
kubenswrapper[4763]: E1206 08:12:40.976874 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.476858829 +0000 UTC m=+44.052563857 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:40 crc kubenswrapper[4763]: I1206 08:12:40.997489 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" event={"ID":"5de53d3f-9ac0-4b3b-aa31-015ccdb83fda","Type":"ContainerStarted","Data":"44228a75a0813af0ab877cdb7f1f3fb311552c35c1c332b54dfa2bbfe7a5107b"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.015794 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" event={"ID":"50828027-891a-487f-acf3-cc7eba748959","Type":"ContainerStarted","Data":"422875e68222c4ce93eb1ec155a5731ce8593d7293e06e2c7582ab22f5081a44"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.048888 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" podStartSLOduration=20.048874884 podStartE2EDuration="20.048874884s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.044713032 +0000 UTC m=+43.620418070" watchObservedRunningTime="2025-12-06 08:12:41.048874884 +0000 UTC m=+43.624579922" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.060919 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" event={"ID":"75bdca6d-084a-4898-bf07-5371cf477720","Type":"ContainerStarted","Data":"097ae3394efacfe058aea71a96837690f404f85bce1b4394c35fba6f88c6f65b"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.081476 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:41 crc kubenswrapper[4763]: E1206 08:12:41.081799 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.581782603 +0000 UTC m=+44.157487641 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.083192 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" event={"ID":"6e007d3e-e776-4374-ba48-232d19ff421f","Type":"ContainerStarted","Data":"9c9645b416a7eec7b99085f369e19a0d4e20a2725c570643f50f5f3233713440"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.083981 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.094579 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" event={"ID":"1bcaf02d-6524-4656-8e17-46ad975fc850","Type":"ContainerStarted","Data":"ebce108d30b41c29c869f498facb67b52fc3dae293a784bf58a52eab7119e379"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.094620 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" event={"ID":"1bcaf02d-6524-4656-8e17-46ad975fc850","Type":"ContainerStarted","Data":"6e7df99cba06ab4b8ee2612c906fb90d94d6dc7ba607d74301c65f0fa394c1ad"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.117526 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s62v" event={"ID":"0d8b8c08-d283-4ca2-aed3-2fcb7637ac91","Type":"ContainerStarted","Data":"4c3d69ef79489b6c00f9ee690cefe5e9fe02871e57e3e34d20d83c39c465f825"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.138227 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" event={"ID":"28ac2e3e-807e-4d30-8775-de3438b3dee5","Type":"ContainerStarted","Data":"08f1edbbee2267939e67400a373afea4df1d145eb49332d8c447a4db92ec9c00"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.165670 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" event={"ID":"bf456eda-cf8e-4084-a34e-2f8cdcac6f11","Type":"ContainerStarted","Data":"697656b41560e69dcd31c2c21a95009d781b1be674e665b8235b99279cfff712"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.165716 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" event={"ID":"bf456eda-cf8e-4084-a34e-2f8cdcac6f11","Type":"ContainerStarted","Data":"254e18be47ee57b8aff5d4e5585aa3c42c7a658d0a30149aa9df448bc6560781"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.167493 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.178366 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" event={"ID":"c726fd5f-7588-4b80-843b-b9f864be53ea","Type":"ContainerStarted","Data":"7544640d9b3b579addd91de0445aa2a4c9ad825fd05fd6faad88d2a330f05447"} Dec 06 08:12:41 crc kubenswrapper[4763]: 
I1206 08:12:41.178436 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" event={"ID":"c726fd5f-7588-4b80-843b-b9f864be53ea","Type":"ContainerStarted","Data":"5a3bd3222a71fa9597dab46fcbc25d1acbe3dd84c8b42f04e2355692a0bf603e"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.178784 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.180159 4763 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-gxwtv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.180195 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" podUID="c726fd5f-7588-4b80-843b-b9f864be53ea" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.183051 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:41 crc kubenswrapper[4763]: E1206 08:12:41.184013 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.683998034 +0000 UTC m=+44.259703072 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.189396 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" event={"ID":"f1cc0acf-7876-428e-8430-a14d2498a435","Type":"ContainerStarted","Data":"14488aa2d6f725f64948eb9524dd6f7ad4676288d31261e5e1b5001020b4d54b"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.225855 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" event={"ID":"8a286baf-47df-45d6-9ad3-25868bf62367","Type":"ContainerStarted","Data":"d5dc82bd0b693455d45735cc5cbacd4edee2a7a183e57d2808f540763e4b93f9"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.226107 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" event={"ID":"8a286baf-47df-45d6-9ad3-25868bf62367","Type":"ContainerStarted","Data":"2e30d781346df699db5d6ebaaea5b71a10e1092ee5b8ed4f8639cbd6580d4da7"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.228405 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sfrgp" podStartSLOduration=20.228394822 podStartE2EDuration="20.228394822s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.122923294 +0000 UTC m=+43.698628332" watchObservedRunningTime="2025-12-06 08:12:41.228394822 +0000 UTC m=+43.804099860" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.240626 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.240706 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.260361 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-8m2kx" event={"ID":"4323dce1-5bfd-48a6-ba50-85a35f5a53f7","Type":"ContainerStarted","Data":"6d1685628b7352a2b7840fccb903d0da8691e2708fd4bf3c23829cd4966e47b5"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.273120 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.287239 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:41 crc kubenswrapper[4763]: E1206 08:12:41.287554 
4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.78754225 +0000 UTC m=+44.363247288 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.294850 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" event={"ID":"aee81022-2ddd-4213-9010-38b1141fc399","Type":"ContainerStarted","Data":"58379480bd5c2305fdac858b3bc0a628ee805bfcfed57493ca80ac41ca6edfca"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.295643 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.332250 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.332540 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" podStartSLOduration=20.332522715 podStartE2EDuration="20.332522715s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.331344653 +0000 UTC m=+43.907049691" watchObservedRunningTime="2025-12-06 08:12:41.332522715 +0000 UTC m=+43.908227753" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.334172 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-p68hx" podStartSLOduration=20.334163289 podStartE2EDuration="20.334163289s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.240030497 +0000 UTC m=+43.815735555" watchObservedRunningTime="2025-12-06 08:12:41.334163289 +0000 UTC m=+43.909868317" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.359878 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" event={"ID":"6759c28b-a57c-4263-980a-0a8476e579dc","Type":"ContainerStarted","Data":"580cb4918ae2dd8b2697a87e0cb9238efc0d42dc879230f9e0c08a433a67b716"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.389124 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:41 crc kubenswrapper[4763]: E1206 08:12:41.389723 4763 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.88969923 +0000 UTC m=+44.465404268 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.389867 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:41 crc kubenswrapper[4763]: E1206 08:12:41.391530 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.891518008 +0000 UTC m=+44.467223036 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.400155 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl" event={"ID":"e35ce166-8cb5-4419-b4db-09f13a65daf2","Type":"ContainerStarted","Data":"53ac10c9ff831a1ac4eb6d4c0e72c4f91d452cdbe31b94e25f789d6d35d34d44"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.416309 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" event={"ID":"db212622-3d09-4dac-9144-e509e64a9b48","Type":"ContainerStarted","Data":"56ce0cc060671758f4c653f457bedcab0b43d23d1b3a54faf2fc2f8f0b39d530"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.427150 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" event={"ID":"71f89833-600c-4231-bc08-2a784591e6d8","Type":"ContainerStarted","Data":"08379ec0ded0ebcaf376976046202777d8f3f2d4effcec9b69f2b13d7da1d77b"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.432023 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-znd8z" podStartSLOduration=20.432007682 podStartE2EDuration="20.432007682s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.426820482 
+0000 UTC m=+44.002525520" watchObservedRunningTime="2025-12-06 08:12:41.432007682 +0000 UTC m=+44.007712720" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.451610 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-2df96" event={"ID":"75d01498-9991-4f99-83ac-c24fdee94ebe","Type":"ContainerStarted","Data":"e8e833483c9c02203ea1b3c5b9c2ca4b005b873503c6b18703a77809337dfdc3"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.451649 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-2df96" event={"ID":"75d01498-9991-4f99-83ac-c24fdee94ebe","Type":"ContainerStarted","Data":"e0e1e702dd4192cd82d2c0508970b0d0f234aecd75a93ba3497383e46e314ed7"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.470865 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" event={"ID":"d5457ee2-7607-40da-8cc5-b053a899760a","Type":"ContainerStarted","Data":"6f3815ac4b1a67c43d2056f3af42e002d967ec76b2b646c8d08985a54825bf3c"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.483128 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-bf86d" event={"ID":"b0833380-d571-4a7e-8330-b6b88b7ffc3c","Type":"ContainerStarted","Data":"77f70cf4cf771080b759d74b48525342212f763cb88cc99a0d806b74d7a75e9e"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.483200 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-bf86d" event={"ID":"b0833380-d571-4a7e-8330-b6b88b7ffc3c","Type":"ContainerStarted","Data":"11ec628cbcd9a79c6fa6986bdad9bcc60adcae68eb3eb32e0a1b29f5908d44a6"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.484081 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.493993 4763 patch_prober.go:28] interesting pod/console-operator-58897d9998-bf86d container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/readyz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.494059 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-bf86d" podUID="b0833380-d571-4a7e-8330-b6b88b7ffc3c" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.35:8443/readyz\": dial tcp 10.217.0.35:8443: connect: connection refused" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.494941 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:41 crc kubenswrapper[4763]: E1206 08:12:41.495959 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:41.995935698 +0000 UTC m=+44.571640736 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.497391 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7j4g2" event={"ID":"1aeea93c-2fff-4930-b63f-cd11cda5d8a0","Type":"ContainerStarted","Data":"cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.497422 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7j4g2" event={"ID":"1aeea93c-2fff-4930-b63f-cd11cda5d8a0","Type":"ContainerStarted","Data":"c2b9931cbc3e3bb0e60d5c7ac629049c903e03a14109e328226e09de4dceeb43"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.521404 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-p2rk6" event={"ID":"dcd65fd5-43dc-42a9-84d9-e37bb8e220af","Type":"ContainerStarted","Data":"0484e3d00e4b13e889e2a9caa345ba612876230274af4c431272639680cf4ff0"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.529890 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" podStartSLOduration=21.529871685 podStartE2EDuration="21.529871685s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.502476165 +0000 UTC m=+44.078181203" watchObservedRunningTime="2025-12-06 08:12:41.529871685 +0000 UTC m=+44.105576723" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.530051 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" event={"ID":"f46918db-01bb-47d9-9290-cbda03d34cfc","Type":"ContainerStarted","Data":"e015d6035841e89d672cf172bcc086938d8be39d745ab610336a680fd5ea2068"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.530085 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-prdgw" podStartSLOduration=20.530080811 podStartE2EDuration="20.530080811s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.529289749 +0000 UTC m=+44.104994787" watchObservedRunningTime="2025-12-06 08:12:41.530080811 +0000 UTC m=+44.105785849" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.543177 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" event={"ID":"d2408364-68f3-4d8d-9cce-22a25d841f6d","Type":"ContainerStarted","Data":"5d21b4dfda2e585c4d400ec887702b570dc3c05d687a4fb111482d448d78d46c"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.543216 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" 
event={"ID":"d2408364-68f3-4d8d-9cce-22a25d841f6d","Type":"ContainerStarted","Data":"4bcf6ebee076aa59d46bd101c8d60c1d5e46789d4c623f61ec51baad8387d243"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.553083 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" podStartSLOduration=20.553066121 podStartE2EDuration="20.553066121s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.552703712 +0000 UTC m=+44.128408750" watchObservedRunningTime="2025-12-06 08:12:41.553066121 +0000 UTC m=+44.128771159" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.577206 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" event={"ID":"d0faf76f-17ec-4dca-a7a3-8b1f96b3b133","Type":"ContainerStarted","Data":"4c987dbc47f5ae4b3c19e5a71c61635818637d4ad48f9f9e269fbc9e046e7d44"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.577253 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" event={"ID":"d0faf76f-17ec-4dca-a7a3-8b1f96b3b133","Type":"ContainerStarted","Data":"cbc09a0d0c3bb60abef141a28fddb24bd9f364f102b81a80773953b60ac53fe7"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.578149 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.579044 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-p2rk6" podStartSLOduration=20.579034003 podStartE2EDuration="20.579034003s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.578540459 +0000 UTC m=+44.154245497" watchObservedRunningTime="2025-12-06 08:12:41.579034003 +0000 UTC m=+44.154739041" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.592335 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.595955 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" event={"ID":"002fe870-57dd-4d32-ad54-a093ca95c088","Type":"ContainerStarted","Data":"bc9218cfbcae59be622100b9ecd4973bdd0d79c07a24c600fa70b168c265183d"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.596013 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" event={"ID":"002fe870-57dd-4d32-ad54-a093ca95c088","Type":"ContainerStarted","Data":"8ba463a9e93c6fd04c8e87df8512ab3ce68616c754b87a8f509a72fb0ce0a0a0"} Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.597040 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 
08:12:41 crc kubenswrapper[4763]: E1206 08:12:41.597297 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:42.097286326 +0000 UTC m=+44.672991364 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.603682 4763 patch_prober.go:28] interesting pod/downloads-7954f5f757-4xfrk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.603737 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4xfrk" podUID="69142441-e9e9-483d-b8ea-a6ad02792eab" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.624373 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7d7bl" podStartSLOduration=20.624351296 podStartE2EDuration="20.624351296s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.624194502 +0000 UTC m=+44.199899540" watchObservedRunningTime="2025-12-06 08:12:41.624351296 +0000 UTC m=+44.200056334" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.692607 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.703191 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:41 crc kubenswrapper[4763]: E1206 08:12:41.705051 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:42.205030196 +0000 UTC m=+44.780735304 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.736671 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" podStartSLOduration=20.736649559 podStartE2EDuration="20.736649559s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.731822409 +0000 UTC m=+44.307527467" watchObservedRunningTime="2025-12-06 08:12:41.736649559 +0000 UTC m=+44.312354597" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.749823 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:41 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:41 crc kubenswrapper[4763]: [+]process-running ok Dec 06 08:12:41 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.749885 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.768873 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q4sl9" podStartSLOduration=21.768847739 podStartE2EDuration="21.768847739s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.766889206 +0000 UTC m=+44.342594244" watchObservedRunningTime="2025-12-06 08:12:41.768847739 +0000 UTC m=+44.344552787" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.793920 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-bf86d" podStartSLOduration=21.793888525 podStartE2EDuration="21.793888525s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.790262158 +0000 UTC m=+44.365967196" watchObservedRunningTime="2025-12-06 08:12:41.793888525 +0000 UTC m=+44.369593563" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.806148 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:41 crc kubenswrapper[4763]: 
E1206 08:12:41.806433 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:42.306422444 +0000 UTC m=+44.882127482 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.822026 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-2df96" podStartSLOduration=8.822011015 podStartE2EDuration="8.822011015s" podCreationTimestamp="2025-12-06 08:12:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.820800532 +0000 UTC m=+44.396505570" watchObservedRunningTime="2025-12-06 08:12:41.822011015 +0000 UTC m=+44.397716053" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.855793 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-7j4g2" podStartSLOduration=20.855774316 podStartE2EDuration="20.855774316s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.846054255 +0000 UTC m=+44.421759293" watchObservedRunningTime="2025-12-06 08:12:41.855774316 +0000 UTC m=+44.431479354" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.886760 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-8qvr9" podStartSLOduration=20.886745594 podStartE2EDuration="20.886745594s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.885426828 +0000 UTC m=+44.461131866" watchObservedRunningTime="2025-12-06 08:12:41.886745594 +0000 UTC m=+44.462450632" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.887829 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qljhm" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.911612 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mpq2j" podStartSLOduration=20.911592054 podStartE2EDuration="20.911592054s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:41.910867515 +0000 UTC m=+44.486572553" watchObservedRunningTime="2025-12-06 08:12:41.911592054 +0000 UTC m=+44.487297092" Dec 06 08:12:41 crc kubenswrapper[4763]: I1206 08:12:41.911858 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:41 crc kubenswrapper[4763]: E1206 08:12:41.912110 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:42.412092278 +0000 UTC m=+44.987797316 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.015956 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:42 crc kubenswrapper[4763]: E1206 08:12:42.016332 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:42.516315943 +0000 UTC m=+45.092020981 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.116992 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:42 crc kubenswrapper[4763]: E1206 08:12:42.117271 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:42.617255619 +0000 UTC m=+45.192960657 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.217983 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:42 crc kubenswrapper[4763]: E1206 08:12:42.218297 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:42.718284297 +0000 UTC m=+45.293989335 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.314518 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gtpp" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.319123 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:42 crc kubenswrapper[4763]: E1206 08:12:42.319295 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:42.819255664 +0000 UTC m=+45.394960702 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.319412 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:42 crc kubenswrapper[4763]: E1206 08:12:42.319667 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:42.819659136 +0000 UTC m=+45.395364174 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.372701 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-glwdn"] Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.373656 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.376672 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.396670 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-glwdn"] Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.420383 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.420557 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-utilities\") pod \"certified-operators-glwdn\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.420588 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-catalog-content\") pod \"certified-operators-glwdn\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.420627 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pcdd\" (UniqueName: \"kubernetes.io/projected/33bff8df-dbde-4ad7-8edc-18b8848bd87e-kube-api-access-6pcdd\") pod \"certified-operators-glwdn\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:12:42 crc kubenswrapper[4763]: E1206 08:12:42.420732 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:42.920718475 +0000 UTC m=+45.496423513 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.521719 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-utilities\") pod \"certified-operators-glwdn\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.521763 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-catalog-content\") pod \"certified-operators-glwdn\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.521807 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pcdd\" (UniqueName: \"kubernetes.io/projected/33bff8df-dbde-4ad7-8edc-18b8848bd87e-kube-api-access-6pcdd\") pod \"certified-operators-glwdn\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.521829 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:42 crc kubenswrapper[4763]: E1206 08:12:42.522116 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:43.022100733 +0000 UTC m=+45.597805771 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.522556 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-utilities\") pod \"certified-operators-glwdn\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.522760 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-catalog-content\") pod \"certified-operators-glwdn\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.559308 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pcdd\" (UniqueName: \"kubernetes.io/projected/33bff8df-dbde-4ad7-8edc-18b8848bd87e-kube-api-access-6pcdd\") pod \"certified-operators-glwdn\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.566299 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gntcq"] Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.574380 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.585701 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.618952 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gntcq"] Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.622951 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.623137 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75d7t\" (UniqueName: \"kubernetes.io/projected/49651d40-4e4d-442a-9421-ed157e45ce24-kube-api-access-75d7t\") pod \"community-operators-gntcq\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.623178 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-utilities\") pod \"community-operators-gntcq\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.623210 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-catalog-content\") pod \"community-operators-gntcq\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:12:42 crc kubenswrapper[4763]: E1206 08:12:42.623334 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:43.123320657 +0000 UTC m=+45.699025695 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.699479 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-ln8pp"] Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.699679 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" event={"ID":"28ac2e3e-807e-4d30-8775-de3438b3dee5","Type":"ContainerStarted","Data":"0f945da6fed7dd09423b5358fef22edf7decba1624585070963a1635ecb35901"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.699893 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.731216 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-catalog-content\") pod \"community-operators-gntcq\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.731333 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75d7t\" (UniqueName: \"kubernetes.io/projected/49651d40-4e4d-442a-9421-ed157e45ce24-kube-api-access-75d7t\") pod \"community-operators-gntcq\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.731358 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.731381 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-utilities\") pod \"community-operators-gntcq\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.732040 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-utilities\") pod \"community-operators-gntcq\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.732250 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-catalog-content\") pod \"community-operators-gntcq\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:12:42 crc kubenswrapper[4763]: E1206 08:12:42.732734 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:43.232716941 +0000 UTC m=+45.808421979 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.756871 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" event={"ID":"d5457ee2-7607-40da-8cc5-b053a899760a","Type":"ContainerStarted","Data":"59996211529c7200176ce03440efaf0dcb8909aa135d245a5f06d7b000c08054"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.757854 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:42 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:42 crc kubenswrapper[4763]: [+]process-running ok Dec 06 08:12:42 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.757881 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.769585 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75d7t\" (UniqueName: \"kubernetes.io/projected/49651d40-4e4d-442a-9421-ed157e45ce24-kube-api-access-75d7t\") pod \"community-operators-gntcq\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.784769 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7gjl5"] Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.785669 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.805185 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-rppzh" podStartSLOduration=21.805157358 podStartE2EDuration="21.805157358s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:42.795479327 +0000 UTC m=+45.371184365" watchObservedRunningTime="2025-12-06 08:12:42.805157358 +0000 UTC m=+45.380862396" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.807599 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7gjl5"] Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.826910 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-8m2kx" event={"ID":"4323dce1-5bfd-48a6-ba50-85a35f5a53f7","Type":"ContainerStarted","Data":"3cd444d1ba0c223ce586c5e985bf4fccd4132d5bee8f520649fbff7e46964cf5"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.826950 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-8m2kx" event={"ID":"4323dce1-5bfd-48a6-ba50-85a35f5a53f7","Type":"ContainerStarted","Data":"991a87ee0505c4e220aa98918387b497836e668a81c83b9cd798e1d85270ba82"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.827532 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.831767 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:42 crc kubenswrapper[4763]: E1206 08:12:42.832357 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:43.332342482 +0000 UTC m=+45.908047520 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.860761 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" event={"ID":"71f89833-600c-4231-bc08-2a784591e6d8","Type":"ContainerStarted","Data":"3d0bbf7b2ab429f9364490464cf0d599043cfb9b0210b9d28e64bc42d5574c79"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.860812 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" event={"ID":"71f89833-600c-4231-bc08-2a784591e6d8","Type":"ContainerStarted","Data":"75f53c01bddf38c08fb2bd5046337c3eb057d4bfd915285a388c52a53386fcfd"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.873555 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" event={"ID":"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8","Type":"ContainerStarted","Data":"1cd37c8afbc77b5550bb788d26d22b55874ef3b1920d5f606a0b0b3d14985898"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.873598 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" event={"ID":"6adfe482-7fa8-4992-bf3a-efae3f8eb8f8","Type":"ContainerStarted","Data":"52abaa1a608587d0a269b9d4882dbd1a37498e4d6ed199f1a5e4b5ff7c1bc811"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.883951 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" event={"ID":"6759c28b-a57c-4263-980a-0a8476e579dc","Type":"ContainerStarted","Data":"ae98f0ce681ccf97103c0ce83f370118c06250f5d40b21064cb1cdc9390804a6"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.883990 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" event={"ID":"6759c28b-a57c-4263-980a-0a8476e579dc","Type":"ContainerStarted","Data":"dc8d037e3c182ec3b3e413286228de1d7071698e53700c4788ab602b8062e94c"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.885540 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pcqzd" event={"ID":"f46918db-01bb-47d9-9290-cbda03d34cfc","Type":"ContainerStarted","Data":"85ef56876d10924a328be8729822fb9dd72da3b4f025121af81570cdfaa07d69"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.886821 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" event={"ID":"50828027-891a-487f-acf3-cc7eba748959","Type":"ContainerStarted","Data":"798a1132a8a47cf4f49dc0507ca0aec02f686b8bb91fbc78136bf5897408b74d"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.886851 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" 
event={"ID":"50828027-891a-487f-acf3-cc7eba748959","Type":"ContainerStarted","Data":"c92d806720df10cc33e21af9880685825592fe3b3acfb85e54efb18f12595570"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.887311 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.922165 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" event={"ID":"db212622-3d09-4dac-9144-e509e64a9b48","Type":"ContainerStarted","Data":"f2cc2ca983c6008947e8dea0f79c30ffff8d67c586aceda7228ec0ba21650d3f"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.924969 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.935353 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bc2f6\" (UniqueName: \"kubernetes.io/projected/20cce31d-dc9e-4669-830a-2663bde5c655-kube-api-access-bc2f6\") pod \"certified-operators-7gjl5\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.935403 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-utilities\") pod \"certified-operators-7gjl5\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.935448 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-catalog-content\") pod \"certified-operators-7gjl5\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.935569 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:42 crc kubenswrapper[4763]: E1206 08:12:42.938810 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:43.438796717 +0000 UTC m=+46.014501755 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.945141 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" event={"ID":"f1cc0acf-7876-428e-8430-a14d2498a435","Type":"ContainerStarted","Data":"e07c7d697b605fc72cff22475614c8ddc12c849c8fac575ff2d8d53b303a33d2"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.954599 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-58vnh" podStartSLOduration=22.954583733 podStartE2EDuration="22.954583733s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:42.954295986 +0000 UTC m=+45.530001024" watchObservedRunningTime="2025-12-06 08:12:42.954583733 +0000 UTC m=+45.530288771" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.955080 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-8m2kx" podStartSLOduration=8.955076287 podStartE2EDuration="8.955076287s" podCreationTimestamp="2025-12-06 08:12:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:42.916293759 +0000 UTC m=+45.491998797" watchObservedRunningTime="2025-12-06 08:12:42.955076287 +0000 UTC m=+45.530781325" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.963418 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-p2rk6" event={"ID":"dcd65fd5-43dc-42a9-84d9-e37bb8e220af","Type":"ContainerStarted","Data":"554c46476be430b4ce27614cc3287dbb5cd53c06f26fa06673a05fbe31d04fce"} Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.973676 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-p7hj6"] Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.974567 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:12:42 crc kubenswrapper[4763]: I1206 08:12:42.975773 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" event={"ID":"976ea0cd-771c-4eb3-8163-87942bcf49f2","Type":"ContainerStarted","Data":"7a18fb1355d672a62c0064b27b5c91bf054c0d36069310200c2d663563f60879"} Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.010614 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5" event={"ID":"6b9c2185-85d0-45db-bb41-44bbf526a10d","Type":"ContainerStarted","Data":"91c0f99296379fa981f13d388a764e5e155eab1404cebe58a023fa7a74c8b773"} Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.010653 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5" event={"ID":"6b9c2185-85d0-45db-bb41-44bbf526a10d","Type":"ContainerStarted","Data":"295df4f89f75c8b529153a8b9336c7d3edf3641c6847e7b0bc6f49fd12fecd33"} Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.012981 4763 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-gxwtv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.013018 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" podUID="c726fd5f-7588-4b80-843b-b9f864be53ea" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.014125 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-dbhnr" podStartSLOduration=22.014105641 podStartE2EDuration="22.014105641s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:43.011002628 +0000 UTC m=+45.586707666" watchObservedRunningTime="2025-12-06 08:12:43.014105641 +0000 UTC m=+45.589810679" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.014311 4763 patch_prober.go:28] interesting pod/downloads-7954f5f757-4xfrk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.014331 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4xfrk" podUID="69142441-e9e9-483d-b8ea-a6ad02792eab" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.015977 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p7hj6"] Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.030331 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6xcs6" Dec 06 08:12:43 
crc kubenswrapper[4763]: I1206 08:12:43.030991 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-bf86d" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.038127 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.038328 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bc2f6\" (UniqueName: \"kubernetes.io/projected/20cce31d-dc9e-4669-830a-2663bde5c655-kube-api-access-bc2f6\") pod \"certified-operators-7gjl5\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.038376 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-catalog-content\") pod \"community-operators-p7hj6\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.038441 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-utilities\") pod \"certified-operators-7gjl5\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.038540 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-catalog-content\") pod \"certified-operators-7gjl5\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.038793 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frnq9\" (UniqueName: \"kubernetes.io/projected/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-kube-api-access-frnq9\") pod \"community-operators-p7hj6\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.039006 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-utilities\") pod \"community-operators-p7hj6\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:12:43 crc kubenswrapper[4763]: E1206 08:12:43.039492 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:43.539469056 +0000 UTC m=+46.115174144 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.041816 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-utilities\") pod \"certified-operators-7gjl5\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.042714 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-catalog-content\") pod \"certified-operators-7gjl5\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.102097 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bc2f6\" (UniqueName: \"kubernetes.io/projected/20cce31d-dc9e-4669-830a-2663bde5c655-kube-api-access-bc2f6\") pod \"certified-operators-7gjl5\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.136165 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.144598 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frnq9\" (UniqueName: \"kubernetes.io/projected/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-kube-api-access-frnq9\") pod \"community-operators-p7hj6\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.144645 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.144678 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-utilities\") pod \"community-operators-p7hj6\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.144711 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-catalog-content\") pod \"community-operators-p7hj6\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.145160 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-catalog-content\") pod \"community-operators-p7hj6\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:12:43 crc kubenswrapper[4763]: E1206 08:12:43.145671 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:43.645658194 +0000 UTC m=+46.221363232 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.146043 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-utilities\") pod \"community-operators-p7hj6\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.173764 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9snzx" podStartSLOduration=22.173747843 podStartE2EDuration="22.173747843s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:43.172393746 +0000 UTC m=+45.748098784" watchObservedRunningTime="2025-12-06 08:12:43.173747843 +0000 UTC m=+45.749452881" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.173862 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" podStartSLOduration=23.173859366 podStartE2EDuration="23.173859366s" podCreationTimestamp="2025-12-06 08:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:43.135066588 +0000 UTC m=+45.710771626" watchObservedRunningTime="2025-12-06 08:12:43.173859366 +0000 UTC m=+45.749564404" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.194585 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frnq9\" (UniqueName: \"kubernetes.io/projected/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-kube-api-access-frnq9\") pod \"community-operators-p7hj6\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.216400 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pflsw" podStartSLOduration=22.216382544 podStartE2EDuration="22.216382544s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-06 08:12:43.21477098 +0000 UTC m=+45.790476018" watchObservedRunningTime="2025-12-06 08:12:43.216382544 +0000 UTC m=+45.792087582" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.290538 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:43 crc kubenswrapper[4763]: E1206 08:12:43.295694 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:43.795668806 +0000 UTC m=+46.371373844 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.295855 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:43 crc kubenswrapper[4763]: E1206 08:12:43.296205 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:43.79619711 +0000 UTC m=+46.371902148 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.301344 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.317785 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q629c" podStartSLOduration=22.317766733 podStartE2EDuration="22.317766733s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:43.317685471 +0000 UTC m=+45.893390509" watchObservedRunningTime="2025-12-06 08:12:43.317766733 +0000 UTC m=+45.893471771" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.319157 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" podStartSLOduration=22.31914983 podStartE2EDuration="22.31914983s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:43.291330179 +0000 UTC m=+45.867035217" watchObservedRunningTime="2025-12-06 08:12:43.31914983 +0000 UTC m=+45.894854888" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.397350 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:43 crc kubenswrapper[4763]: E1206 08:12:43.397668 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:43.89765243 +0000 UTC m=+46.473357468 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.433083 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mtn5" podStartSLOduration=22.433067086 podStartE2EDuration="22.433067086s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:43.430299572 +0000 UTC m=+46.006004610" watchObservedRunningTime="2025-12-06 08:12:43.433067086 +0000 UTC m=+46.008772124" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.500304 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:43 crc kubenswrapper[4763]: E1206 08:12:43.500828 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:44.000815716 +0000 UTC m=+46.576520754 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.601483 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:43 crc kubenswrapper[4763]: E1206 08:12:43.601852 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:44.101838915 +0000 UTC m=+46.677543943 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.703535 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:43 crc kubenswrapper[4763]: E1206 08:12:43.703926 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:44.203913311 +0000 UTC m=+46.779618349 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.731264 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:43 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:43 crc kubenswrapper[4763]: [+]process-running ok Dec 06 08:12:43 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.731319 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.797141 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gntcq"] Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.805351 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:43 crc kubenswrapper[4763]: E1206 08:12:43.805505 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:44.305485795 +0000 UTC m=+46.881190833 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.805605 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:43 crc kubenswrapper[4763]: E1206 08:12:43.805835 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:44.305828604 +0000 UTC m=+46.881533642 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.820694 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-glwdn"] Dec 06 08:12:43 crc kubenswrapper[4763]: W1206 08:12:43.843768 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49651d40_4e4d_442a_9421_ed157e45ce24.slice/crio-6175e615fb141ca87af51bebcc09c61efdc1d1913ffe3856d5da28a95b6b7f6f WatchSource:0}: Error finding container 6175e615fb141ca87af51bebcc09c61efdc1d1913ffe3856d5da28a95b6b7f6f: Status 404 returned error can't find the container with id 6175e615fb141ca87af51bebcc09c61efdc1d1913ffe3856d5da28a95b6b7f6f Dec 06 08:12:43 crc kubenswrapper[4763]: W1206 08:12:43.846806 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33bff8df_dbde_4ad7_8edc_18b8848bd87e.slice/crio-e19ac4749f3da2571409810f1459ec3e0ee5664b00907412e1333bc5c2be4f5b WatchSource:0}: Error finding container e19ac4749f3da2571409810f1459ec3e0ee5664b00907412e1333bc5c2be4f5b: Status 404 returned error can't find the container with id e19ac4749f3da2571409810f1459ec3e0ee5664b00907412e1333bc5c2be4f5b Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.886299 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p7hj6"] Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.906332 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:43 
crc kubenswrapper[4763]: E1206 08:12:43.906500 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-06 08:12:44.406473142 +0000 UTC m=+46.982178180 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.906604 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:43 crc kubenswrapper[4763]: E1206 08:12:43.906989 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-06 08:12:44.406981665 +0000 UTC m=+46.982686703 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vlddl" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 06 08:12:43 crc kubenswrapper[4763]: W1206 08:12:43.922180 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0dcae7a8_a85b_4cb4_89d5_39f169eb2dff.slice/crio-b089e2ad53b0ade5914cd18b580ad82eddb6e0578b08b480107a086f8b93f9ad WatchSource:0}: Error finding container b089e2ad53b0ade5914cd18b580ad82eddb6e0578b08b480107a086f8b93f9ad: Status 404 returned error can't find the container with id b089e2ad53b0ade5914cd18b580ad82eddb6e0578b08b480107a086f8b93f9ad Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.929844 4763 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.967555 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7gjl5"] Dec 06 08:12:43 crc kubenswrapper[4763]: I1206 08:12:43.983105 4763 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-06T08:12:43.929866684Z","Handler":null,"Name":""} Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.006461 4763 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: 
/var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.006506 4763 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.007454 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.016207 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.034533 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7hj6" event={"ID":"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff","Type":"ContainerStarted","Data":"b089e2ad53b0ade5914cd18b580ad82eddb6e0578b08b480107a086f8b93f9ad"} Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.036545 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7gjl5" event={"ID":"20cce31d-dc9e-4669-830a-2663bde5c655","Type":"ContainerStarted","Data":"285e1e7861de1b9a011361f914ffa69f946fa599e52d2dbda65214a8484cf7cf"} Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.037328 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gntcq" event={"ID":"49651d40-4e4d-442a-9421-ed157e45ce24","Type":"ContainerStarted","Data":"6175e615fb141ca87af51bebcc09c61efdc1d1913ffe3856d5da28a95b6b7f6f"} Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.038251 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-glwdn" event={"ID":"33bff8df-dbde-4ad7-8edc-18b8848bd87e","Type":"ContainerStarted","Data":"e19ac4749f3da2571409810f1459ec3e0ee5664b00907412e1333bc5c2be4f5b"} Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.040386 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" event={"ID":"28ac2e3e-807e-4d30-8775-de3438b3dee5","Type":"ContainerStarted","Data":"81a3d757a90b957cea49fb7a14191e5d2847e8a807b0c45e88ea87d27e507f52"} Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.043812 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerName="kube-multus-additional-cni-plugins" containerID="cri-o://721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" gracePeriod=30 Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.048302 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.108758 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.111387 4763 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.111413 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.564412 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mdc4f"] Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.565528 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.567608 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.581559 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdc4f"] Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.716105 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-catalog-content\") pod \"redhat-marketplace-mdc4f\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.716250 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-utilities\") pod \"redhat-marketplace-mdc4f\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.716280 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9r24\" (UniqueName: \"kubernetes.io/projected/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-kube-api-access-d9r24\") pod \"redhat-marketplace-mdc4f\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.721662 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vlddl\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:44 
crc kubenswrapper[4763]: I1206 08:12:44.728227 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:44 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:44 crc kubenswrapper[4763]: [+]process-running ok Dec 06 08:12:44 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.728295 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.785196 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.817286 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-catalog-content\") pod \"redhat-marketplace-mdc4f\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.817402 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-utilities\") pod \"redhat-marketplace-mdc4f\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.817423 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9r24\" (UniqueName: \"kubernetes.io/projected/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-kube-api-access-d9r24\") pod \"redhat-marketplace-mdc4f\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.818222 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-utilities\") pod \"redhat-marketplace-mdc4f\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.818260 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-catalog-content\") pod \"redhat-marketplace-mdc4f\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.837345 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9r24\" (UniqueName: \"kubernetes.io/projected/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-kube-api-access-d9r24\") pod \"redhat-marketplace-mdc4f\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.878526 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.962129 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rb2sc"] Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.963060 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:12:44 crc kubenswrapper[4763]: I1206 08:12:44.980074 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rb2sc"] Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.001670 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.002429 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.004288 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.004529 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.004783 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.061774 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vlddl"] Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.078689 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdc4f"] Dec 06 08:12:45 crc kubenswrapper[4763]: W1206 08:12:45.089304 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef3a25db_4a5f_4fb2_88c3_6dfd2ce24d6d.slice/crio-29f0640c6e741a8a1e0200b8b39b228690999e97849dd6048d64ae315476434b WatchSource:0}: Error finding container 29f0640c6e741a8a1e0200b8b39b228690999e97849dd6048d64ae315476434b: Status 404 returned error can't find the container with id 29f0640c6e741a8a1e0200b8b39b228690999e97849dd6048d64ae315476434b Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.120802 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdwzr\" (UniqueName: \"kubernetes.io/projected/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-kube-api-access-mdwzr\") pod \"redhat-marketplace-rb2sc\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.121104 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-catalog-content\") pod \"redhat-marketplace-rb2sc\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.121130 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-utilities\") 
pod \"redhat-marketplace-rb2sc\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.121197 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3284dbe1-107d-4881-87f3-05529f91e507-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3284dbe1-107d-4881-87f3-05529f91e507\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.121256 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3284dbe1-107d-4881-87f3-05529f91e507-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3284dbe1-107d-4881-87f3-05529f91e507\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.221818 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-catalog-content\") pod \"redhat-marketplace-rb2sc\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.221923 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-utilities\") pod \"redhat-marketplace-rb2sc\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.221982 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3284dbe1-107d-4881-87f3-05529f91e507-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3284dbe1-107d-4881-87f3-05529f91e507\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.222080 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3284dbe1-107d-4881-87f3-05529f91e507-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3284dbe1-107d-4881-87f3-05529f91e507\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.222262 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdwzr\" (UniqueName: \"kubernetes.io/projected/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-kube-api-access-mdwzr\") pod \"redhat-marketplace-rb2sc\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.222476 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-utilities\") pod \"redhat-marketplace-rb2sc\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.222739 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-catalog-content\") pod \"redhat-marketplace-rb2sc\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.223043 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3284dbe1-107d-4881-87f3-05529f91e507-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3284dbe1-107d-4881-87f3-05529f91e507\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.242796 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3284dbe1-107d-4881-87f3-05529f91e507-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3284dbe1-107d-4881-87f3-05529f91e507\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.244162 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdwzr\" (UniqueName: \"kubernetes.io/projected/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-kube-api-access-mdwzr\") pod \"redhat-marketplace-rb2sc\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.296017 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.320630 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.489414 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.489687 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rb2sc"] Dec 06 08:12:45 crc kubenswrapper[4763]: W1206 08:12:45.499808 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc4bb178_0642_4fd9_8a54_1cfe3de5c3bf.slice/crio-83b428752683031cb1aff83723bba67a6aabb50cf01080e490d40c54d80551d7 WatchSource:0}: Error finding container 83b428752683031cb1aff83723bba67a6aabb50cf01080e490d40c54d80551d7: Status 404 returned error can't find the container with id 83b428752683031cb1aff83723bba67a6aabb50cf01080e490d40c54d80551d7 Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.500449 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.535262 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.560074 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ssrkx"] Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.571657 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.577098 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ssrkx"] Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.580177 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.591649 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=0.591627135 podStartE2EDuration="591.627135ms" podCreationTimestamp="2025-12-06 08:12:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:45.591619145 +0000 UTC m=+48.167324203" watchObservedRunningTime="2025-12-06 08:12:45.591627135 +0000 UTC m=+48.167332173" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.727278 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-utilities\") pod \"redhat-operators-ssrkx\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.727334 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2skqf\" (UniqueName: \"kubernetes.io/projected/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-kube-api-access-2skqf\") pod \"redhat-operators-ssrkx\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.727352 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-catalog-content\") pod \"redhat-operators-ssrkx\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.727356 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:45 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:45 crc kubenswrapper[4763]: [+]process-running ok Dec 06 08:12:45 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.727426 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.737249 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.828045 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2skqf\" (UniqueName: 
\"kubernetes.io/projected/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-kube-api-access-2skqf\") pod \"redhat-operators-ssrkx\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.828088 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-catalog-content\") pod \"redhat-operators-ssrkx\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.828184 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-utilities\") pod \"redhat-operators-ssrkx\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.828699 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-utilities\") pod \"redhat-operators-ssrkx\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.829029 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-catalog-content\") pod \"redhat-operators-ssrkx\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.847413 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2skqf\" (UniqueName: \"kubernetes.io/projected/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-kube-api-access-2skqf\") pod \"redhat-operators-ssrkx\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.903794 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.963838 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-92drr"] Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.965079 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:12:45 crc kubenswrapper[4763]: I1206 08:12:45.983289 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-92drr"] Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.057678 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rb2sc" event={"ID":"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf","Type":"ContainerStarted","Data":"83b428752683031cb1aff83723bba67a6aabb50cf01080e490d40c54d80551d7"} Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.059340 4763 generic.go:334] "Generic (PLEG): container finished" podID="f1cc0acf-7876-428e-8430-a14d2498a435" containerID="e07c7d697b605fc72cff22475614c8ddc12c849c8fac575ff2d8d53b303a33d2" exitCode=0 Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.059412 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" event={"ID":"f1cc0acf-7876-428e-8430-a14d2498a435","Type":"ContainerDied","Data":"e07c7d697b605fc72cff22475614c8ddc12c849c8fac575ff2d8d53b303a33d2"} Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.062084 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdc4f" event={"ID":"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d","Type":"ContainerStarted","Data":"29f0640c6e741a8a1e0200b8b39b228690999e97849dd6048d64ae315476434b"} Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.065690 4763 generic.go:334] "Generic (PLEG): container finished" podID="33bff8df-dbde-4ad7-8edc-18b8848bd87e" containerID="9ca2df7a57b699fd688192ebc30029c598670a20ae005e1d98c380301bb6ab08" exitCode=0 Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.065751 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-glwdn" event={"ID":"33bff8df-dbde-4ad7-8edc-18b8848bd87e","Type":"ContainerDied","Data":"9ca2df7a57b699fd688192ebc30029c598670a20ae005e1d98c380301bb6ab08"} Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.070767 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" event={"ID":"7d3f1284-20c9-4aa5-9c45-3cc96943980c","Type":"ContainerStarted","Data":"fa4f7518652394bad2320d85552c9bcb51d883dc7e42167dc3b291e197e64215"} Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.078183 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"3284dbe1-107d-4881-87f3-05529f91e507","Type":"ContainerStarted","Data":"c2b4a83c4be2aaf5e07e625b061071b132be98ee78d94f7e5433a49b9c4a47be"} Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.082537 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7hj6" event={"ID":"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff","Type":"ContainerStarted","Data":"3d2968cf875443dd4c64b5c639c3e62d3cb00cf67459f6cc7401e358a359b810"} Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.117290 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ssrkx"] Dec 06 08:12:46 crc kubenswrapper[4763]: W1206 08:12:46.121626 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cf352a5_4869_4b34_951c_ccd1c1da1fb2.slice/crio-f4c32c38bdd0f56773ddc8fdb762a14006109d96f63927a06263ec4cc3eead89 WatchSource:0}: 
Error finding container f4c32c38bdd0f56773ddc8fdb762a14006109d96f63927a06263ec4cc3eead89: Status 404 returned error can't find the container with id f4c32c38bdd0f56773ddc8fdb762a14006109d96f63927a06263ec4cc3eead89 Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.135111 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-utilities\") pod \"redhat-operators-92drr\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.135168 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-catalog-content\") pod \"redhat-operators-92drr\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.135240 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwptg\" (UniqueName: \"kubernetes.io/projected/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-kube-api-access-pwptg\") pod \"redhat-operators-92drr\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.161509 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.167746 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.237034 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-utilities\") pod \"redhat-operators-92drr\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.237137 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-catalog-content\") pod \"redhat-operators-92drr\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.237206 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwptg\" (UniqueName: \"kubernetes.io/projected/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-kube-api-access-pwptg\") pod \"redhat-operators-92drr\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.237859 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-utilities\") pod \"redhat-operators-92drr\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.237979 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-catalog-content\") pod \"redhat-operators-92drr\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.259375 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwptg\" (UniqueName: \"kubernetes.io/projected/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-kube-api-access-pwptg\") pod \"redhat-operators-92drr\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.389955 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.595570 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-92drr"] Dec 06 08:12:46 crc kubenswrapper[4763]: W1206 08:12:46.602844 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod411a9cc4_5c2c_4060_8a7d_f07b95ee6b0d.slice/crio-7e1588f327492797d74dfc6c0da2a3195f53489d6a0e54c2d01685057a8be7a4 WatchSource:0}: Error finding container 7e1588f327492797d74dfc6c0da2a3195f53489d6a0e54c2d01685057a8be7a4: Status 404 returned error can't find the container with id 7e1588f327492797d74dfc6c0da2a3195f53489d6a0e54c2d01685057a8be7a4 Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.625077 4763 patch_prober.go:28] interesting pod/downloads-7954f5f757-4xfrk container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.625149 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-4xfrk" podUID="69142441-e9e9-483d-b8ea-a6ad02792eab" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.625088 4763 patch_prober.go:28] interesting pod/downloads-7954f5f757-4xfrk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.625243 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4xfrk" podUID="69142441-e9e9-483d-b8ea-a6ad02792eab" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.724274 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.727449 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:46 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:46 crc kubenswrapper[4763]: [+]process-running ok Dec 06 
08:12:46 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:46 crc kubenswrapper[4763]: I1206 08:12:46.727494 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.089225 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" event={"ID":"28ac2e3e-807e-4d30-8775-de3438b3dee5","Type":"ContainerStarted","Data":"d4a160fa812d214434eaab5579c755b6537fe216079d6ac3f88e5e58dda7c884"} Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.091211 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssrkx" event={"ID":"9cf352a5-4869-4b34-951c-ccd1c1da1fb2","Type":"ContainerStarted","Data":"f4c32c38bdd0f56773ddc8fdb762a14006109d96f63927a06263ec4cc3eead89"} Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.092778 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gntcq" event={"ID":"49651d40-4e4d-442a-9421-ed157e45ce24","Type":"ContainerStarted","Data":"b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5"} Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.100015 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92drr" event={"ID":"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d","Type":"ContainerStarted","Data":"7e1588f327492797d74dfc6c0da2a3195f53489d6a0e54c2d01685057a8be7a4"} Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.106464 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-l9dxz" Dec 06 08:12:47 crc kubenswrapper[4763]: E1206 08:12:47.195563 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:12:47 crc kubenswrapper[4763]: E1206 08:12:47.197302 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:12:47 crc kubenswrapper[4763]: E1206 08:12:47.199316 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:12:47 crc kubenswrapper[4763]: E1206 08:12:47.199353 4763 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerName="kube-multus-additional-cni-plugins" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.318263 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.318299 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.333142 4763 patch_prober.go:28] interesting pod/console-f9d7485db-7j4g2 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.333196 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-7j4g2" podUID="1aeea93c-2fff-4930-b63f-cd11cda5d8a0" containerName="console" probeResult="failure" output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.478226 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.555653 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1cc0acf-7876-428e-8430-a14d2498a435-config-volume\") pod \"f1cc0acf-7876-428e-8430-a14d2498a435\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.555745 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fb9ct\" (UniqueName: \"kubernetes.io/projected/f1cc0acf-7876-428e-8430-a14d2498a435-kube-api-access-fb9ct\") pod \"f1cc0acf-7876-428e-8430-a14d2498a435\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.555789 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f1cc0acf-7876-428e-8430-a14d2498a435-secret-volume\") pod \"f1cc0acf-7876-428e-8430-a14d2498a435\" (UID: \"f1cc0acf-7876-428e-8430-a14d2498a435\") " Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.557346 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1cc0acf-7876-428e-8430-a14d2498a435-config-volume" (OuterVolumeSpecName: "config-volume") pod "f1cc0acf-7876-428e-8430-a14d2498a435" (UID: "f1cc0acf-7876-428e-8430-a14d2498a435"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.564179 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1cc0acf-7876-428e-8430-a14d2498a435-kube-api-access-fb9ct" (OuterVolumeSpecName: "kube-api-access-fb9ct") pod "f1cc0acf-7876-428e-8430-a14d2498a435" (UID: "f1cc0acf-7876-428e-8430-a14d2498a435"). InnerVolumeSpecName "kube-api-access-fb9ct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.564221 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1cc0acf-7876-428e-8430-a14d2498a435-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f1cc0acf-7876-428e-8430-a14d2498a435" (UID: "f1cc0acf-7876-428e-8430-a14d2498a435"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.656960 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fb9ct\" (UniqueName: \"kubernetes.io/projected/f1cc0acf-7876-428e-8430-a14d2498a435-kube-api-access-fb9ct\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.656991 4763 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f1cc0acf-7876-428e-8430-a14d2498a435-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.657002 4763 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1cc0acf-7876-428e-8430-a14d2498a435-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.732092 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:47 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:47 crc kubenswrapper[4763]: [+]process-running ok Dec 06 08:12:47 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.732151 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.740160 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 06 08:12:47 crc kubenswrapper[4763]: E1206 08:12:47.740374 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1cc0acf-7876-428e-8430-a14d2498a435" containerName="collect-profiles" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.740389 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1cc0acf-7876-428e-8430-a14d2498a435" containerName="collect-profiles" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.740504 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1cc0acf-7876-428e-8430-a14d2498a435" containerName="collect-profiles" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.740832 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.742716 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.742757 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.752015 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.757772 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"63411e76-c7b8-4bc1-9ee1-f82cdc905252\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.758049 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"63411e76-c7b8-4bc1-9ee1-f82cdc905252\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.858515 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"63411e76-c7b8-4bc1-9ee1-f82cdc905252\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.858593 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"63411e76-c7b8-4bc1-9ee1-f82cdc905252\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.858668 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"63411e76-c7b8-4bc1-9ee1-f82cdc905252\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 08:12:47 crc kubenswrapper[4763]: I1206 08:12:47.877252 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"63411e76-c7b8-4bc1-9ee1-f82cdc905252\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.059372 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.109767 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" event={"ID":"f1cc0acf-7876-428e-8430-a14d2498a435","Type":"ContainerDied","Data":"14488aa2d6f725f64948eb9524dd6f7ad4676288d31261e5e1b5001020b4d54b"} Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.109803 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14488aa2d6f725f64948eb9524dd6f7ad4676288d31261e5e1b5001020b4d54b" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.109821 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.112077 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7gjl5" event={"ID":"20cce31d-dc9e-4669-830a-2663bde5c655","Type":"ContainerStarted","Data":"6e2ca18f0320f75d3e3f80abaa550eb633d55e5783cf40a29263f5ad9cb25055"} Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.279444 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 06 08:12:48 crc kubenswrapper[4763]: W1206 08:12:48.427101 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod63411e76_c7b8_4bc1_9ee1_f82cdc905252.slice/crio-a315bd224fd594b4f11ab7a0b1081f0ce778960ae05607056d1a538f9e2c3501 WatchSource:0}: Error finding container a315bd224fd594b4f11ab7a0b1081f0ce778960ae05607056d1a538f9e2c3501: Status 404 returned error can't find the container with id a315bd224fd594b4f11ab7a0b1081f0ce778960ae05607056d1a538f9e2c3501 Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.569927 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.570392 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.571244 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.577702 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:48 crc kubenswrapper[4763]: E1206 08:12:48.635654 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cf352a5_4869_4b34_951c_ccd1c1da1fb2.slice/crio-3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cf352a5_4869_4b34_951c_ccd1c1da1fb2.slice/crio-conmon-3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884.scope\": RecentStats: unable to find data in memory cache]" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.648943 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.671531 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.674513 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.729675 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:48 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:48 crc kubenswrapper[4763]: [+]process-running ok Dec 06 08:12:48 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.729729 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.772837 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.777971 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.934054 4763 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 06 08:12:48 crc kubenswrapper[4763]: I1206 08:12:48.942730 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:12:49 crc kubenswrapper[4763]: W1206 08:12:49.079521 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-0258d7776d1ee0027cf877d4cec7665e9c1e7472175f41383c68a078eca27774 WatchSource:0}: Error finding container 0258d7776d1ee0027cf877d4cec7665e9c1e7472175f41383c68a078eca27774: Status 404 returned error can't find the container with id 0258d7776d1ee0027cf877d4cec7665e9c1e7472175f41383c68a078eca27774 Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.122328 4763 generic.go:334] "Generic (PLEG): container finished" podID="49651d40-4e4d-442a-9421-ed157e45ce24" containerID="b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5" exitCode=0 Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.122612 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gntcq" event={"ID":"49651d40-4e4d-442a-9421-ed157e45ce24","Type":"ContainerDied","Data":"b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.125740 4763 generic.go:334] "Generic (PLEG): container finished" podID="20cce31d-dc9e-4669-830a-2663bde5c655" containerID="6e2ca18f0320f75d3e3f80abaa550eb633d55e5783cf40a29263f5ad9cb25055" exitCode=0 Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.125801 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7gjl5" event={"ID":"20cce31d-dc9e-4669-830a-2663bde5c655","Type":"ContainerDied","Data":"6e2ca18f0320f75d3e3f80abaa550eb633d55e5783cf40a29263f5ad9cb25055"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.126820 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.130988 4763 generic.go:334] "Generic (PLEG): container finished" podID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" containerID="7714a8a6b79709126f364f0d62c56b1d3452f8a8fba6dcbd2a886d213e6d1539" exitCode=0 Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.131649 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdc4f" event={"ID":"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d","Type":"ContainerDied","Data":"7714a8a6b79709126f364f0d62c56b1d3452f8a8fba6dcbd2a886d213e6d1539"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.134644 4763 generic.go:334] "Generic (PLEG): container finished" podID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerID="ecd4554feb413e934a9bae078f649478d2be2b84816a89af93d5b1ce8db385d7" exitCode=0 Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.134728 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92drr" event={"ID":"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d","Type":"ContainerDied","Data":"ecd4554feb413e934a9bae078f649478d2be2b84816a89af93d5b1ce8db385d7"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.139299 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"0258d7776d1ee0027cf877d4cec7665e9c1e7472175f41383c68a078eca27774"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.146208 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" event={"ID":"28ac2e3e-807e-4d30-8775-de3438b3dee5","Type":"ContainerStarted","Data":"d5a5a23ce0f6196add8b4146ff1765414c0f7ae30a742cb68ae76a4790dc35cd"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.156447 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"63411e76-c7b8-4bc1-9ee1-f82cdc905252","Type":"ContainerStarted","Data":"d0bb77c23b1eafa0f8dfff13743aef7b4572703b19a5e14a040e1bae5182cd47"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.156504 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"63411e76-c7b8-4bc1-9ee1-f82cdc905252","Type":"ContainerStarted","Data":"a315bd224fd594b4f11ab7a0b1081f0ce778960ae05607056d1a538f9e2c3501"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.165644 4763 generic.go:334] "Generic (PLEG): container finished" podID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" containerID="3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884" exitCode=0 Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.165750 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssrkx" event={"ID":"9cf352a5-4869-4b34-951c-ccd1c1da1fb2","Type":"ContainerDied","Data":"3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.176152 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" event={"ID":"7d3f1284-20c9-4aa5-9c45-3cc96943980c","Type":"ContainerStarted","Data":"a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.176295 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.190423 4763 generic.go:334] "Generic (PLEG): container finished" podID="3284dbe1-107d-4881-87f3-05529f91e507" containerID="f2bb42374aff7d4059b949bc0f97d66c3a5f2b1d5aec780fd0a842f6b81421dc" exitCode=0 Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.190502 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"3284dbe1-107d-4881-87f3-05529f91e507","Type":"ContainerDied","Data":"f2bb42374aff7d4059b949bc0f97d66c3a5f2b1d5aec780fd0a842f6b81421dc"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.193820 4763 generic.go:334] "Generic (PLEG): container finished" podID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" containerID="86473714f4ce3e0c0546d3efd9d2a4c483b250ae2cfc71b869c9dd2d74212843" exitCode=0 Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.193881 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rb2sc" event={"ID":"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf","Type":"ContainerDied","Data":"86473714f4ce3e0c0546d3efd9d2a4c483b250ae2cfc71b869c9dd2d74212843"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.199001 4763 generic.go:334] "Generic (PLEG): container finished" podID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" 
containerID="3d2968cf875443dd4c64b5c639c3e62d3cb00cf67459f6cc7401e358a359b810" exitCode=0 Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.199061 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7hj6" event={"ID":"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff","Type":"ContainerDied","Data":"3d2968cf875443dd4c64b5c639c3e62d3cb00cf67459f6cc7401e358a359b810"} Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.247233 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" podStartSLOduration=28.247218865 podStartE2EDuration="28.247218865s" podCreationTimestamp="2025-12-06 08:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:49.246165996 +0000 UTC m=+51.821871034" watchObservedRunningTime="2025-12-06 08:12:49.247218865 +0000 UTC m=+51.822923893" Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.330346 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-b9lvw" podStartSLOduration=15.328457049 podStartE2EDuration="15.328457049s" podCreationTimestamp="2025-12-06 08:12:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:49.325775277 +0000 UTC m=+51.901480315" watchObservedRunningTime="2025-12-06 08:12:49.328457049 +0000 UTC m=+51.904162087" Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.533801 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.533785664 podStartE2EDuration="2.533785664s" podCreationTimestamp="2025-12-06 08:12:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:12:49.512354715 +0000 UTC m=+52.088059753" watchObservedRunningTime="2025-12-06 08:12:49.533785664 +0000 UTC m=+52.109490702" Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.728221 4763 patch_prober.go:28] interesting pod/router-default-5444994796-mt26w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 06 08:12:49 crc kubenswrapper[4763]: [-]has-synced failed: reason withheld Dec 06 08:12:49 crc kubenswrapper[4763]: [+]process-running ok Dec 06 08:12:49 crc kubenswrapper[4763]: healthz check failed Dec 06 08:12:49 crc kubenswrapper[4763]: I1206 08:12:49.728278 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mt26w" podUID="8be954b6-c6f3-4932-992d-736df8a687ce" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 06 08:12:50 crc kubenswrapper[4763]: I1206 08:12:50.203652 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"709f27ef37b408a4dc46d1225e621944343de6fc723cb20b8262de4d22ccbc6f"} Dec 06 08:12:50 crc kubenswrapper[4763]: I1206 08:12:50.204335 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" 
event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"329e27585d0d178d6533870b59c6375207870f9de7a1c8fc2d3e88e6daba4ad4"} Dec 06 08:12:50 crc kubenswrapper[4763]: I1206 08:12:50.404386 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 06 08:12:50 crc kubenswrapper[4763]: I1206 08:12:50.525978 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3284dbe1-107d-4881-87f3-05529f91e507-kubelet-dir\") pod \"3284dbe1-107d-4881-87f3-05529f91e507\" (UID: \"3284dbe1-107d-4881-87f3-05529f91e507\") " Dec 06 08:12:50 crc kubenswrapper[4763]: I1206 08:12:50.526090 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3284dbe1-107d-4881-87f3-05529f91e507-kube-api-access\") pod \"3284dbe1-107d-4881-87f3-05529f91e507\" (UID: \"3284dbe1-107d-4881-87f3-05529f91e507\") " Dec 06 08:12:50 crc kubenswrapper[4763]: I1206 08:12:50.526119 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3284dbe1-107d-4881-87f3-05529f91e507-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "3284dbe1-107d-4881-87f3-05529f91e507" (UID: "3284dbe1-107d-4881-87f3-05529f91e507"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:12:50 crc kubenswrapper[4763]: I1206 08:12:50.526305 4763 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3284dbe1-107d-4881-87f3-05529f91e507-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:50 crc kubenswrapper[4763]: I1206 08:12:50.553332 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3284dbe1-107d-4881-87f3-05529f91e507-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "3284dbe1-107d-4881-87f3-05529f91e507" (UID: "3284dbe1-107d-4881-87f3-05529f91e507"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:50 crc kubenswrapper[4763]: I1206 08:12:50.627879 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3284dbe1-107d-4881-87f3-05529f91e507-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:50 crc kubenswrapper[4763]: I1206 08:12:50.730598 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:50 crc kubenswrapper[4763]: I1206 08:12:50.733405 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-mt26w" Dec 06 08:12:51 crc kubenswrapper[4763]: I1206 08:12:51.210473 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"3284dbe1-107d-4881-87f3-05529f91e507","Type":"ContainerDied","Data":"c2b4a83c4be2aaf5e07e625b061071b132be98ee78d94f7e5433a49b9c4a47be"} Dec 06 08:12:51 crc kubenswrapper[4763]: I1206 08:12:51.210527 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2b4a83c4be2aaf5e07e625b061071b132be98ee78d94f7e5433a49b9c4a47be" Dec 06 08:12:51 crc kubenswrapper[4763]: I1206 08:12:51.210491 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 06 08:12:52 crc kubenswrapper[4763]: I1206 08:12:52.202561 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-8m2kx" Dec 06 08:12:53 crc kubenswrapper[4763]: I1206 08:12:53.220485 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d5cc16b6110a2dea8032710885f6b3108359df920461364aa56121076b404b69"} Dec 06 08:12:53 crc kubenswrapper[4763]: I1206 08:12:53.221717 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"4c8c162ae352ba665d7304c1fa7e8192787319959822723036c59b1047a18925"} Dec 06 08:12:53 crc kubenswrapper[4763]: I1206 08:12:53.223365 4763 generic.go:334] "Generic (PLEG): container finished" podID="63411e76-c7b8-4bc1-9ee1-f82cdc905252" containerID="d0bb77c23b1eafa0f8dfff13743aef7b4572703b19a5e14a040e1bae5182cd47" exitCode=0 Dec 06 08:12:53 crc kubenswrapper[4763]: I1206 08:12:53.223432 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"63411e76-c7b8-4bc1-9ee1-f82cdc905252","Type":"ContainerDied","Data":"d0bb77c23b1eafa0f8dfff13743aef7b4572703b19a5e14a040e1bae5182cd47"} Dec 06 08:12:53 crc kubenswrapper[4763]: I1206 08:12:53.224928 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"f6d959ec3a3e04477a457bfed8d7e11cbbc14ea93c6bffe6c1c5c8692af4ec8e"} Dec 06 08:12:54 crc kubenswrapper[4763]: I1206 08:12:54.513966 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 08:12:54 crc kubenswrapper[4763]: I1206 08:12:54.574234 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kube-api-access\") pod \"63411e76-c7b8-4bc1-9ee1-f82cdc905252\" (UID: \"63411e76-c7b8-4bc1-9ee1-f82cdc905252\") " Dec 06 08:12:54 crc kubenswrapper[4763]: I1206 08:12:54.574384 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kubelet-dir\") pod \"63411e76-c7b8-4bc1-9ee1-f82cdc905252\" (UID: \"63411e76-c7b8-4bc1-9ee1-f82cdc905252\") " Dec 06 08:12:54 crc kubenswrapper[4763]: I1206 08:12:54.574686 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "63411e76-c7b8-4bc1-9ee1-f82cdc905252" (UID: "63411e76-c7b8-4bc1-9ee1-f82cdc905252"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:12:54 crc kubenswrapper[4763]: I1206 08:12:54.586186 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "63411e76-c7b8-4bc1-9ee1-f82cdc905252" (UID: "63411e76-c7b8-4bc1-9ee1-f82cdc905252"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:12:54 crc kubenswrapper[4763]: I1206 08:12:54.676338 4763 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:54 crc kubenswrapper[4763]: I1206 08:12:54.676376 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63411e76-c7b8-4bc1-9ee1-f82cdc905252-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 08:12:55 crc kubenswrapper[4763]: I1206 08:12:55.275287 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"63411e76-c7b8-4bc1-9ee1-f82cdc905252","Type":"ContainerDied","Data":"a315bd224fd594b4f11ab7a0b1081f0ce778960ae05607056d1a538f9e2c3501"} Dec 06 08:12:55 crc kubenswrapper[4763]: I1206 08:12:55.275609 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a315bd224fd594b4f11ab7a0b1081f0ce778960ae05607056d1a538f9e2c3501" Dec 06 08:12:55 crc kubenswrapper[4763]: I1206 08:12:55.275334 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 06 08:12:56 crc kubenswrapper[4763]: I1206 08:12:56.630087 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-4xfrk" Dec 06 08:12:56 crc kubenswrapper[4763]: I1206 08:12:56.717950 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:12:57 crc kubenswrapper[4763]: E1206 08:12:57.203045 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:12:57 crc kubenswrapper[4763]: E1206 08:12:57.205546 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:12:57 crc kubenswrapper[4763]: E1206 08:12:57.210361 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:12:57 crc kubenswrapper[4763]: E1206 08:12:57.210391 4763 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerName="kube-multus-additional-cni-plugins" Dec 06 08:12:57 crc kubenswrapper[4763]: I1206 08:12:57.318131 4763 patch_prober.go:28] interesting pod/console-f9d7485db-7j4g2 container/console namespace/openshift-console: Startup probe status=failure output="Get 
\"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Dec 06 08:12:57 crc kubenswrapper[4763]: I1206 08:12:57.318200 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-7j4g2" podUID="1aeea93c-2fff-4930-b63f-cd11cda5d8a0" containerName="console" probeResult="failure" output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" Dec 06 08:12:58 crc kubenswrapper[4763]: I1206 08:12:58.944079 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:13:05 crc kubenswrapper[4763]: I1206 08:13:05.707794 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:13:07 crc kubenswrapper[4763]: E1206 08:13:07.195384 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:07 crc kubenswrapper[4763]: E1206 08:13:07.196572 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:07 crc kubenswrapper[4763]: E1206 08:13:07.197823 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:07 crc kubenswrapper[4763]: E1206 08:13:07.197961 4763 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerName="kube-multus-additional-cni-plugins" Dec 06 08:13:07 crc kubenswrapper[4763]: I1206 08:13:07.359396 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:13:07 crc kubenswrapper[4763]: I1206 08:13:07.364855 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:13:15 crc kubenswrapper[4763]: I1206 08:13:15.451454 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-ln8pp_954f9d07-1e99-48fa-bd6d-b61d22ee7faa/kube-multus-additional-cni-plugins/0.log" Dec 06 08:13:15 crc kubenswrapper[4763]: I1206 08:13:15.451954 4763 generic.go:334] "Generic (PLEG): container finished" podID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" exitCode=137 Dec 06 08:13:15 crc kubenswrapper[4763]: I1206 08:13:15.451989 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" 
event={"ID":"954f9d07-1e99-48fa-bd6d-b61d22ee7faa","Type":"ContainerDied","Data":"721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044"} Dec 06 08:13:17 crc kubenswrapper[4763]: I1206 08:13:17.098940 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-xl75m" Dec 06 08:13:17 crc kubenswrapper[4763]: E1206 08:13:17.194235 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:17 crc kubenswrapper[4763]: E1206 08:13:17.194883 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:17 crc kubenswrapper[4763]: E1206 08:13:17.195343 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:17 crc kubenswrapper[4763]: E1206 08:13:17.195392 4763 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerName="kube-multus-additional-cni-plugins" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.336261 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 06 08:13:21 crc kubenswrapper[4763]: E1206 08:13:21.336736 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3284dbe1-107d-4881-87f3-05529f91e507" containerName="pruner" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.336749 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="3284dbe1-107d-4881-87f3-05529f91e507" containerName="pruner" Dec 06 08:13:21 crc kubenswrapper[4763]: E1206 08:13:21.336766 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63411e76-c7b8-4bc1-9ee1-f82cdc905252" containerName="pruner" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.336771 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="63411e76-c7b8-4bc1-9ee1-f82cdc905252" containerName="pruner" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.336861 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="63411e76-c7b8-4bc1-9ee1-f82cdc905252" containerName="pruner" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.336876 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="3284dbe1-107d-4881-87f3-05529f91e507" containerName="pruner" Dec 06 08:13:21 crc 
kubenswrapper[4763]: I1206 08:13:21.337223 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.340424 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.340698 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.356587 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.523461 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/778d7873-d619-45ef-974a-767527ddcdcc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"778d7873-d619-45ef-974a-767527ddcdcc\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.523514 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/778d7873-d619-45ef-974a-767527ddcdcc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"778d7873-d619-45ef-974a-767527ddcdcc\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.625092 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/778d7873-d619-45ef-974a-767527ddcdcc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"778d7873-d619-45ef-974a-767527ddcdcc\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.625403 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/778d7873-d619-45ef-974a-767527ddcdcc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"778d7873-d619-45ef-974a-767527ddcdcc\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.625495 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/778d7873-d619-45ef-974a-767527ddcdcc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"778d7873-d619-45ef-974a-767527ddcdcc\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.644520 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/778d7873-d619-45ef-974a-767527ddcdcc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"778d7873-d619-45ef-974a-767527ddcdcc\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 08:13:21 crc kubenswrapper[4763]: I1206 08:13:21.659783 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.535043 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.537460 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.550708 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.580772 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-var-lock\") pod \"installer-9-crc\" (UID: \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.580834 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kube-api-access\") pod \"installer-9-crc\" (UID: \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.580867 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kubelet-dir\") pod \"installer-9-crc\" (UID: \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.681869 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kubelet-dir\") pod \"installer-9-crc\" (UID: \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.682003 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-var-lock\") pod \"installer-9-crc\" (UID: \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.682035 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kube-api-access\") pod \"installer-9-crc\" (UID: \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.682299 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-var-lock\") pod \"installer-9-crc\" (UID: \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.682291 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kubelet-dir\") pod \"installer-9-crc\" (UID: \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.700802 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:13:25 crc kubenswrapper[4763]: I1206 08:13:25.852883 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:13:27 crc kubenswrapper[4763]: E1206 08:13:27.194320 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:27 crc kubenswrapper[4763]: E1206 08:13:27.194873 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:27 crc kubenswrapper[4763]: E1206 08:13:27.195388 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:27 crc kubenswrapper[4763]: E1206 08:13:27.195451 4763 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerName="kube-multus-additional-cni-plugins" Dec 06 08:13:28 crc kubenswrapper[4763]: I1206 08:13:28.733697 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 06 08:13:36 crc kubenswrapper[4763]: I1206 08:13:36.200664 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 06 08:13:36 crc kubenswrapper[4763]: I1206 08:13:36.255189 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=8.255161495 podStartE2EDuration="8.255161495s" podCreationTimestamp="2025-12-06 08:13:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:13:36.245588948 +0000 UTC m=+98.821293996" watchObservedRunningTime="2025-12-06 08:13:36.255161495 +0000 UTC m=+98.830866573" Dec 06 08:13:37 crc kubenswrapper[4763]: E1206 08:13:37.193736 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 
08:13:37 crc kubenswrapper[4763]: E1206 08:13:37.194086 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:37 crc kubenswrapper[4763]: E1206 08:13:37.194401 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:37 crc kubenswrapper[4763]: E1206 08:13:37.194432 4763 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerName="kube-multus-additional-cni-plugins" Dec 06 08:13:47 crc kubenswrapper[4763]: E1206 08:13:47.197872 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:47 crc kubenswrapper[4763]: E1206 08:13:47.199106 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:47 crc kubenswrapper[4763]: E1206 08:13:47.199634 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 06 08:13:47 crc kubenswrapper[4763]: E1206 08:13:47.199700 4763 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerName="kube-multus-additional-cni-plugins" Dec 06 08:13:51 crc kubenswrapper[4763]: E1206 08:13:51.831294 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 06 08:13:51 crc 
kubenswrapper[4763]: E1206 08:13:51.832351 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d9r24,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-mdc4f_openshift-marketplace(ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:13:51 crc kubenswrapper[4763]: E1206 08:13:51.834080 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-mdc4f" podUID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" Dec 06 08:13:53 crc kubenswrapper[4763]: E1206 08:13:53.443806 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-mdc4f" podUID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" Dec 06 08:13:53 crc kubenswrapper[4763]: E1206 08:13:53.518042 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 06 08:13:53 crc kubenswrapper[4763]: E1206 08:13:53.518244 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mdwzr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-rb2sc_openshift-marketplace(cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:13:53 crc kubenswrapper[4763]: E1206 08:13:53.519485 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-rb2sc" podUID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" Dec 06 08:13:53 crc kubenswrapper[4763]: E1206 08:13:53.524528 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 06 08:13:53 crc kubenswrapper[4763]: E1206 08:13:53.524694 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pwptg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-92drr_openshift-marketplace(411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:13:53 crc kubenswrapper[4763]: E1206 08:13:53.525957 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-92drr" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" Dec 06 08:13:54 crc kubenswrapper[4763]: E1206 08:13:54.613642 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-92drr" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" Dec 06 08:13:54 crc kubenswrapper[4763]: E1206 08:13:54.613647 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-rb2sc" podUID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" Dec 06 08:13:54 crc kubenswrapper[4763]: E1206 08:13:54.689273 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 06 08:13:54 crc kubenswrapper[4763]: E1206 08:13:54.689443 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-75d7t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-gntcq_openshift-marketplace(49651d40-4e4d-442a-9421-ed157e45ce24): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:13:54 crc kubenswrapper[4763]: E1206 08:13:54.690818 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-gntcq" podUID="49651d40-4e4d-442a-9421-ed157e45ce24" Dec 06 08:13:54 crc kubenswrapper[4763]: E1206 08:13:54.707924 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 06 08:13:54 crc kubenswrapper[4763]: E1206 08:13:54.708036 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-frnq9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-p7hj6_openshift-marketplace(0dcae7a8-a85b-4cb4-89d5-39f169eb2dff): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:13:54 crc kubenswrapper[4763]: E1206 08:13:54.709208 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-p7hj6" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" Dec 06 08:13:54 crc kubenswrapper[4763]: E1206 08:13:54.709619 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 06 08:13:54 crc kubenswrapper[4763]: E1206 08:13:54.709796 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2skqf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-ssrkx_openshift-marketplace(9cf352a5-4869-4b34-951c-ccd1c1da1fb2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:13:54 crc kubenswrapper[4763]: E1206 08:13:54.710953 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-ssrkx" podUID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" Dec 06 08:13:55 crc kubenswrapper[4763]: E1206 08:13:55.854865 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-ssrkx" podUID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" Dec 06 08:13:55 crc kubenswrapper[4763]: E1206 08:13:55.854951 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-gntcq" podUID="49651d40-4e4d-442a-9421-ed157e45ce24" Dec 06 08:13:55 crc kubenswrapper[4763]: E1206 08:13:55.855066 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-p7hj6" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" Dec 06 08:13:55 crc kubenswrapper[4763]: E1206 08:13:55.944799 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 06 08:13:55 crc kubenswrapper[4763]: E1206 08:13:55.945208 4763 kuberuntime_manager.go:1274] "Unhandled Error" 
err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6pcdd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-glwdn_openshift-marketplace(33bff8df-dbde-4ad7-8edc-18b8848bd87e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:13:55 crc kubenswrapper[4763]: E1206 08:13:55.946413 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-glwdn" podUID="33bff8df-dbde-4ad7-8edc-18b8848bd87e" Dec 06 08:13:55 crc kubenswrapper[4763]: I1206 08:13:55.952991 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-ln8pp_954f9d07-1e99-48fa-bd6d-b61d22ee7faa/kube-multus-additional-cni-plugins/0.log" Dec 06 08:13:55 crc kubenswrapper[4763]: I1206 08:13:55.953061 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:13:55 crc kubenswrapper[4763]: E1206 08:13:55.965808 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 06 08:13:55 crc kubenswrapper[4763]: E1206 08:13:55.966069 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bc2f6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-7gjl5_openshift-marketplace(20cce31d-dc9e-4669-830a-2663bde5c655): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:13:55 crc kubenswrapper[4763]: E1206 08:13:55.967811 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-7gjl5" podUID="20cce31d-dc9e-4669-830a-2663bde5c655" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.104978 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-tuning-conf-dir\") pod \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.105069 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-tuning-conf-dir" (OuterVolumeSpecName: "tuning-conf-dir") pod "954f9d07-1e99-48fa-bd6d-b61d22ee7faa" (UID: "954f9d07-1e99-48fa-bd6d-b61d22ee7faa"). InnerVolumeSpecName "tuning-conf-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.105177 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-cni-sysctl-allowlist\") pod \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.105216 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6m64\" (UniqueName: \"kubernetes.io/projected/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-kube-api-access-c6m64\") pod \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.105289 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-ready\") pod \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\" (UID: \"954f9d07-1e99-48fa-bd6d-b61d22ee7faa\") " Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.105688 4763 reconciler_common.go:293] "Volume detached for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-tuning-conf-dir\") on node \"crc\" DevicePath \"\"" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.105953 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-ready" (OuterVolumeSpecName: "ready") pod "954f9d07-1e99-48fa-bd6d-b61d22ee7faa" (UID: "954f9d07-1e99-48fa-bd6d-b61d22ee7faa"). InnerVolumeSpecName "ready". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.105955 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "954f9d07-1e99-48fa-bd6d-b61d22ee7faa" (UID: "954f9d07-1e99-48fa-bd6d-b61d22ee7faa"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.120474 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-kube-api-access-c6m64" (OuterVolumeSpecName: "kube-api-access-c6m64") pod "954f9d07-1e99-48fa-bd6d-b61d22ee7faa" (UID: "954f9d07-1e99-48fa-bd6d-b61d22ee7faa"). InnerVolumeSpecName "kube-api-access-c6m64". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.206606 4763 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.206642 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6m64\" (UniqueName: \"kubernetes.io/projected/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-kube-api-access-c6m64\") on node \"crc\" DevicePath \"\"" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.206661 4763 reconciler_common.go:293] "Volume detached for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/954f9d07-1e99-48fa-bd6d-b61d22ee7faa-ready\") on node \"crc\" DevicePath \"\"" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.246771 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.326538 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 06 08:13:56 crc kubenswrapper[4763]: W1206 08:13:56.333582 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod27ded1af_d1b2_44f7_899e_12946c3e3a8e.slice/crio-e07f3466609b6f640a0c571052c227bd8f5dddb509e9091ab3e6d8af4cc9d215 WatchSource:0}: Error finding container e07f3466609b6f640a0c571052c227bd8f5dddb509e9091ab3e6d8af4cc9d215: Status 404 returned error can't find the container with id e07f3466609b6f640a0c571052c227bd8f5dddb509e9091ab3e6d8af4cc9d215 Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.671156 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"27ded1af-d1b2-44f7-899e-12946c3e3a8e","Type":"ContainerStarted","Data":"57170eaa08209b9ed88fdf61ea729946d3c83860c93e0b28dc641f6f9e9f0b1f"} Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.671574 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"27ded1af-d1b2-44f7-899e-12946c3e3a8e","Type":"ContainerStarted","Data":"e07f3466609b6f640a0c571052c227bd8f5dddb509e9091ab3e6d8af4cc9d215"} Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.673115 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-ln8pp_954f9d07-1e99-48fa-bd6d-b61d22ee7faa/kube-multus-additional-cni-plugins/0.log" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.673160 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" event={"ID":"954f9d07-1e99-48fa-bd6d-b61d22ee7faa","Type":"ContainerDied","Data":"dfbc8f617779ef31295d559d2a5a230f449e5c08e106a0e2123c9b893eb25a12"} Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.673195 4763 scope.go:117] "RemoveContainer" containerID="721502f8842d90e5d4d9b01526a2cf4f5ffcd8280a91953410b219e5ff181044" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.673268 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-ln8pp" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.675789 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"778d7873-d619-45ef-974a-767527ddcdcc","Type":"ContainerStarted","Data":"5cfcec397d1cfc2fccf86b49f12b67e6bd6a110c02728160bad3378820c02e43"} Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.675818 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"778d7873-d619-45ef-974a-767527ddcdcc","Type":"ContainerStarted","Data":"9a7c3186e7bd7b7cb64450cdab290ab4cd84dd23f6da06436ba37b35d45d376e"} Dec 06 08:13:56 crc kubenswrapper[4763]: E1206 08:13:56.676271 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-7gjl5" podUID="20cce31d-dc9e-4669-830a-2663bde5c655" Dec 06 08:13:56 crc kubenswrapper[4763]: E1206 08:13:56.678568 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-glwdn" podUID="33bff8df-dbde-4ad7-8edc-18b8848bd87e" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.694739 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=31.694718002 podStartE2EDuration="31.694718002s" podCreationTimestamp="2025-12-06 08:13:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:13:56.688445934 +0000 UTC m=+119.264150982" watchObservedRunningTime="2025-12-06 08:13:56.694718002 +0000 UTC m=+119.270423040" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.706646 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=35.706625163 podStartE2EDuration="35.706625163s" podCreationTimestamp="2025-12-06 08:13:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:13:56.704626699 +0000 UTC m=+119.280331747" watchObservedRunningTime="2025-12-06 08:13:56.706625163 +0000 UTC m=+119.282330201" Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.750290 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-ln8pp"] Dec 06 08:13:56 crc kubenswrapper[4763]: I1206 08:13:56.753485 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-ln8pp"] Dec 06 08:13:57 crc kubenswrapper[4763]: I1206 08:13:57.683028 4763 generic.go:334] "Generic (PLEG): container finished" podID="778d7873-d619-45ef-974a-767527ddcdcc" containerID="5cfcec397d1cfc2fccf86b49f12b67e6bd6a110c02728160bad3378820c02e43" exitCode=0 Dec 06 08:13:57 crc kubenswrapper[4763]: I1206 08:13:57.683111 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" 
event={"ID":"778d7873-d619-45ef-974a-767527ddcdcc","Type":"ContainerDied","Data":"5cfcec397d1cfc2fccf86b49f12b67e6bd6a110c02728160bad3378820c02e43"} Dec 06 08:13:57 crc kubenswrapper[4763]: I1206 08:13:57.731114 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" path="/var/lib/kubelet/pods/954f9d07-1e99-48fa-bd6d-b61d22ee7faa/volumes" Dec 06 08:13:58 crc kubenswrapper[4763]: I1206 08:13:58.913978 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 08:13:59 crc kubenswrapper[4763]: I1206 08:13:59.037882 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/778d7873-d619-45ef-974a-767527ddcdcc-kubelet-dir\") pod \"778d7873-d619-45ef-974a-767527ddcdcc\" (UID: \"778d7873-d619-45ef-974a-767527ddcdcc\") " Dec 06 08:13:59 crc kubenswrapper[4763]: I1206 08:13:59.037957 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/778d7873-d619-45ef-974a-767527ddcdcc-kube-api-access\") pod \"778d7873-d619-45ef-974a-767527ddcdcc\" (UID: \"778d7873-d619-45ef-974a-767527ddcdcc\") " Dec 06 08:13:59 crc kubenswrapper[4763]: I1206 08:13:59.037979 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/778d7873-d619-45ef-974a-767527ddcdcc-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "778d7873-d619-45ef-974a-767527ddcdcc" (UID: "778d7873-d619-45ef-974a-767527ddcdcc"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:13:59 crc kubenswrapper[4763]: I1206 08:13:59.038207 4763 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/778d7873-d619-45ef-974a-767527ddcdcc-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 06 08:13:59 crc kubenswrapper[4763]: I1206 08:13:59.043183 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/778d7873-d619-45ef-974a-767527ddcdcc-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "778d7873-d619-45ef-974a-767527ddcdcc" (UID: "778d7873-d619-45ef-974a-767527ddcdcc"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:13:59 crc kubenswrapper[4763]: I1206 08:13:59.139198 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/778d7873-d619-45ef-974a-767527ddcdcc-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 08:13:59 crc kubenswrapper[4763]: I1206 08:13:59.693459 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"778d7873-d619-45ef-974a-767527ddcdcc","Type":"ContainerDied","Data":"9a7c3186e7bd7b7cb64450cdab290ab4cd84dd23f6da06436ba37b35d45d376e"} Dec 06 08:13:59 crc kubenswrapper[4763]: I1206 08:13:59.693756 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a7c3186e7bd7b7cb64450cdab290ab4cd84dd23f6da06436ba37b35d45d376e" Dec 06 08:13:59 crc kubenswrapper[4763]: I1206 08:13:59.693527 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 06 08:14:06 crc kubenswrapper[4763]: I1206 08:14:06.728268 4763 generic.go:334] "Generic (PLEG): container finished" podID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" containerID="5fa126529ebfd9f8d654fdb4163dd9313138d45dbfe91ba77b7f2932c6bbe253" exitCode=0 Dec 06 08:14:06 crc kubenswrapper[4763]: I1206 08:14:06.728351 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdc4f" event={"ID":"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d","Type":"ContainerDied","Data":"5fa126529ebfd9f8d654fdb4163dd9313138d45dbfe91ba77b7f2932c6bbe253"} Dec 06 08:14:06 crc kubenswrapper[4763]: I1206 08:14:06.731006 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rb2sc" event={"ID":"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf","Type":"ContainerStarted","Data":"81513c65f4d1e32374bc191787bb3aa7b8c727dea8f10657186f441129c66b2d"} Dec 06 08:14:07 crc kubenswrapper[4763]: I1206 08:14:07.752659 4763 generic.go:334] "Generic (PLEG): container finished" podID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" containerID="81513c65f4d1e32374bc191787bb3aa7b8c727dea8f10657186f441129c66b2d" exitCode=0 Dec 06 08:14:07 crc kubenswrapper[4763]: I1206 08:14:07.752726 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rb2sc" event={"ID":"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf","Type":"ContainerDied","Data":"81513c65f4d1e32374bc191787bb3aa7b8c727dea8f10657186f441129c66b2d"} Dec 06 08:14:08 crc kubenswrapper[4763]: I1206 08:14:08.760212 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdc4f" event={"ID":"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d","Type":"ContainerStarted","Data":"7ef2e01001160daffe9d73462285a7c68140d85927beaed547681090d459b02c"} Dec 06 08:14:08 crc kubenswrapper[4763]: I1206 08:14:08.764428 4763 generic.go:334] "Generic (PLEG): container finished" podID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerID="3dcc1aeca2decdce6b3febe04965607767fb40d4d75a087046c11dc9957c8caf" exitCode=0 Dec 06 08:14:08 crc kubenswrapper[4763]: I1206 08:14:08.764509 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7hj6" event={"ID":"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff","Type":"ContainerDied","Data":"3dcc1aeca2decdce6b3febe04965607767fb40d4d75a087046c11dc9957c8caf"} Dec 06 08:14:08 crc kubenswrapper[4763]: I1206 08:14:08.767939 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rb2sc" event={"ID":"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf","Type":"ContainerStarted","Data":"48549c600a5e9b7f352150e77549727de127abfa515b10fa43b3f2aa37be8cc3"} Dec 06 08:14:08 crc kubenswrapper[4763]: I1206 08:14:08.805139 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mdc4f" podStartSLOduration=6.231983259 podStartE2EDuration="1m24.805111211s" podCreationTimestamp="2025-12-06 08:12:44 +0000 UTC" firstStartedPulling="2025-12-06 08:12:49.13230174 +0000 UTC m=+51.708006778" lastFinishedPulling="2025-12-06 08:14:07.705429692 +0000 UTC m=+130.281134730" observedRunningTime="2025-12-06 08:14:08.783914332 +0000 UTC m=+131.359619370" watchObservedRunningTime="2025-12-06 08:14:08.805111211 +0000 UTC m=+131.380816249" Dec 06 08:14:08 crc kubenswrapper[4763]: I1206 08:14:08.825691 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-rb2sc" podStartSLOduration=5.8014302 podStartE2EDuration="1m24.825671014s" podCreationTimestamp="2025-12-06 08:12:44 +0000 UTC" firstStartedPulling="2025-12-06 08:12:49.19633696 +0000 UTC m=+51.772041998" lastFinishedPulling="2025-12-06 08:14:08.220577774 +0000 UTC m=+130.796282812" observedRunningTime="2025-12-06 08:14:08.82366356 +0000 UTC m=+131.399368618" watchObservedRunningTime="2025-12-06 08:14:08.825671014 +0000 UTC m=+131.401376052" Dec 06 08:14:09 crc kubenswrapper[4763]: I1206 08:14:09.774801 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92drr" event={"ID":"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d","Type":"ContainerStarted","Data":"c764195b9e13327744de1da5d6c325b96e398b0064358e355b484b95d5879006"} Dec 06 08:14:09 crc kubenswrapper[4763]: I1206 08:14:09.776725 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7hj6" event={"ID":"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff","Type":"ContainerStarted","Data":"c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259"} Dec 06 08:14:09 crc kubenswrapper[4763]: I1206 08:14:09.817799 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-p7hj6" podStartSLOduration=7.7939317500000005 podStartE2EDuration="1m27.81778044s" podCreationTimestamp="2025-12-06 08:12:42 +0000 UTC" firstStartedPulling="2025-12-06 08:12:49.200946425 +0000 UTC m=+51.776651463" lastFinishedPulling="2025-12-06 08:14:09.224795115 +0000 UTC m=+131.800500153" observedRunningTime="2025-12-06 08:14:09.817451941 +0000 UTC m=+132.393156989" watchObservedRunningTime="2025-12-06 08:14:09.81778044 +0000 UTC m=+132.393485478" Dec 06 08:14:10 crc kubenswrapper[4763]: I1206 08:14:10.783104 4763 generic.go:334] "Generic (PLEG): container finished" podID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerID="c764195b9e13327744de1da5d6c325b96e398b0064358e355b484b95d5879006" exitCode=0 Dec 06 08:14:10 crc kubenswrapper[4763]: I1206 08:14:10.783193 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92drr" event={"ID":"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d","Type":"ContainerDied","Data":"c764195b9e13327744de1da5d6c325b96e398b0064358e355b484b95d5879006"} Dec 06 08:14:10 crc kubenswrapper[4763]: I1206 08:14:10.786062 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssrkx" event={"ID":"9cf352a5-4869-4b34-951c-ccd1c1da1fb2","Type":"ContainerStarted","Data":"c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc"} Dec 06 08:14:11 crc kubenswrapper[4763]: I1206 08:14:11.792271 4763 generic.go:334] "Generic (PLEG): container finished" podID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" containerID="c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc" exitCode=0 Dec 06 08:14:11 crc kubenswrapper[4763]: I1206 08:14:11.792357 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssrkx" event={"ID":"9cf352a5-4869-4b34-951c-ccd1c1da1fb2","Type":"ContainerDied","Data":"c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc"} Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.062233 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hsbkg"] Dec 06 08:14:12 crc kubenswrapper[4763]: E1206 08:14:12.062438 4763 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerName="kube-multus-additional-cni-plugins" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.062448 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerName="kube-multus-additional-cni-plugins" Dec 06 08:14:12 crc kubenswrapper[4763]: E1206 08:14:12.062456 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="778d7873-d619-45ef-974a-767527ddcdcc" containerName="pruner" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.062463 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="778d7873-d619-45ef-974a-767527ddcdcc" containerName="pruner" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.062582 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="954f9d07-1e99-48fa-bd6d-b61d22ee7faa" containerName="kube-multus-additional-cni-plugins" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.062601 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="778d7873-d619-45ef-974a-767527ddcdcc" containerName="pruner" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.062982 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.075832 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hsbkg"] Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.192518 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-registry-certificates\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.192592 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.192728 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-registry-tls\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.192764 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.192812 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-installation-pull-secrets\") pod 
\"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.192872 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfjb2\" (UniqueName: \"kubernetes.io/projected/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-kube-api-access-jfjb2\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.192959 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-trusted-ca\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.192998 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-bound-sa-token\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.213997 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.294459 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfjb2\" (UniqueName: \"kubernetes.io/projected/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-kube-api-access-jfjb2\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.294534 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-trusted-ca\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.294566 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-bound-sa-token\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.294613 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-registry-certificates\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 
08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.294665 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-registry-tls\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.294688 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.294718 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.295205 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.296046 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-trusted-ca\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.296225 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-registry-certificates\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.304648 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.310914 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfjb2\" (UniqueName: \"kubernetes.io/projected/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-kube-api-access-jfjb2\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.316064 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-registry-tls\") pod 
\"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.316456 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fdc178a0-0a78-4b8b-85ba-32167fdbcf89-bound-sa-token\") pod \"image-registry-66df7c8f76-hsbkg\" (UID: \"fdc178a0-0a78-4b8b-85ba-32167fdbcf89\") " pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.379015 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.799993 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92drr" event={"ID":"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d","Type":"ContainerStarted","Data":"6ad459f89c6daaca4d156837f9f29cdd273bffd3d92de05ff8883cc9c5daf3d5"} Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.802170 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gntcq" event={"ID":"49651d40-4e4d-442a-9421-ed157e45ce24","Type":"ContainerStarted","Data":"8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061"} Dec 06 08:14:12 crc kubenswrapper[4763]: I1206 08:14:12.816026 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-92drr" podStartSLOduration=5.802900453 podStartE2EDuration="1m27.81600933s" podCreationTimestamp="2025-12-06 08:12:45 +0000 UTC" firstStartedPulling="2025-12-06 08:12:49.136473783 +0000 UTC m=+51.712178821" lastFinishedPulling="2025-12-06 08:14:11.14958266 +0000 UTC m=+133.725287698" observedRunningTime="2025-12-06 08:14:12.81529452 +0000 UTC m=+135.390999558" watchObservedRunningTime="2025-12-06 08:14:12.81600933 +0000 UTC m=+135.391714368" Dec 06 08:14:13 crc kubenswrapper[4763]: I1206 08:14:13.301949 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:14:13 crc kubenswrapper[4763]: I1206 08:14:13.301998 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:14:13 crc kubenswrapper[4763]: I1206 08:14:13.505597 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:14:13 crc kubenswrapper[4763]: I1206 08:14:13.808802 4763 generic.go:334] "Generic (PLEG): container finished" podID="49651d40-4e4d-442a-9421-ed157e45ce24" containerID="8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061" exitCode=0 Dec 06 08:14:13 crc kubenswrapper[4763]: I1206 08:14:13.808881 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gntcq" event={"ID":"49651d40-4e4d-442a-9421-ed157e45ce24","Type":"ContainerDied","Data":"8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061"} Dec 06 08:14:14 crc kubenswrapper[4763]: I1206 08:14:14.879839 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:14:14 crc kubenswrapper[4763]: I1206 08:14:14.879885 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 
08:14:14 crc kubenswrapper[4763]: I1206 08:14:14.922620 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:14:15 crc kubenswrapper[4763]: I1206 08:14:15.296691 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:14:15 crc kubenswrapper[4763]: I1206 08:14:15.296734 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:14:15 crc kubenswrapper[4763]: I1206 08:14:15.336028 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:14:15 crc kubenswrapper[4763]: I1206 08:14:15.856787 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:14:15 crc kubenswrapper[4763]: I1206 08:14:15.936678 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:14:16 crc kubenswrapper[4763]: I1206 08:14:16.391160 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:14:16 crc kubenswrapper[4763]: I1206 08:14:16.391506 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:14:17 crc kubenswrapper[4763]: I1206 08:14:17.435469 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-92drr" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerName="registry-server" probeResult="failure" output=< Dec 06 08:14:17 crc kubenswrapper[4763]: timeout: failed to connect service ":50051" within 1s Dec 06 08:14:17 crc kubenswrapper[4763]: > Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.212828 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7gjl5"] Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.217299 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-glwdn"] Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.222348 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gntcq"] Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.225374 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p7hj6"] Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.226124 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-p7hj6" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerName="registry-server" containerID="cri-o://c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259" gracePeriod=30 Dec 06 08:14:18 crc kubenswrapper[4763]: E1206 08:14:18.230450 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259" cmd=["grpc_health_probe","-addr=:50051"] Dec 06 08:14:18 crc kubenswrapper[4763]: E1206 08:14:18.231982 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register 
an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259" cmd=["grpc_health_probe","-addr=:50051"] Dec 06 08:14:18 crc kubenswrapper[4763]: E1206 08:14:18.233255 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259" cmd=["grpc_health_probe","-addr=:50051"] Dec 06 08:14:18 crc kubenswrapper[4763]: E1206 08:14:18.233319 4763 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-marketplace/community-operators-p7hj6" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerName="registry-server" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.236780 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gxwtv"] Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.237008 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" podUID="c726fd5f-7588-4b80-843b-b9f864be53ea" containerName="marketplace-operator" containerID="cri-o://7544640d9b3b579addd91de0445aa2a4c9ad825fd05fd6faad88d2a330f05447" gracePeriod=30 Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.244626 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdc4f"] Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.244955 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mdc4f" podUID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" containerName="registry-server" containerID="cri-o://7ef2e01001160daffe9d73462285a7c68140d85927beaed547681090d459b02c" gracePeriod=30 Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.248774 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rb2sc"] Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.248992 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rb2sc" podUID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" containerName="registry-server" containerID="cri-o://48549c600a5e9b7f352150e77549727de127abfa515b10fa43b3f2aa37be8cc3" gracePeriod=30 Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.258248 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4fsn7"] Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.258968 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.262427 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-92drr"] Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.262614 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-92drr" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerName="registry-server" containerID="cri-o://6ad459f89c6daaca4d156837f9f29cdd273bffd3d92de05ff8883cc9c5daf3d5" gracePeriod=30 Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.267119 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4fsn7"] Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.269818 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ssrkx"] Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.375080 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61203368-9fce-4808-ae90-b4a955f5f893-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4fsn7\" (UID: \"61203368-9fce-4808-ae90-b4a955f5f893\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.375152 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pq5p\" (UniqueName: \"kubernetes.io/projected/61203368-9fce-4808-ae90-b4a955f5f893-kube-api-access-7pq5p\") pod \"marketplace-operator-79b997595-4fsn7\" (UID: \"61203368-9fce-4808-ae90-b4a955f5f893\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.375265 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/61203368-9fce-4808-ae90-b4a955f5f893-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4fsn7\" (UID: \"61203368-9fce-4808-ae90-b4a955f5f893\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.476780 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61203368-9fce-4808-ae90-b4a955f5f893-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4fsn7\" (UID: \"61203368-9fce-4808-ae90-b4a955f5f893\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.477042 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pq5p\" (UniqueName: \"kubernetes.io/projected/61203368-9fce-4808-ae90-b4a955f5f893-kube-api-access-7pq5p\") pod \"marketplace-operator-79b997595-4fsn7\" (UID: \"61203368-9fce-4808-ae90-b4a955f5f893\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.477084 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/61203368-9fce-4808-ae90-b4a955f5f893-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4fsn7\" (UID: 
\"61203368-9fce-4808-ae90-b4a955f5f893\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.478760 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61203368-9fce-4808-ae90-b4a955f5f893-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4fsn7\" (UID: \"61203368-9fce-4808-ae90-b4a955f5f893\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.482780 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/61203368-9fce-4808-ae90-b4a955f5f893-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4fsn7\" (UID: \"61203368-9fce-4808-ae90-b4a955f5f893\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.495215 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pq5p\" (UniqueName: \"kubernetes.io/projected/61203368-9fce-4808-ae90-b4a955f5f893-kube-api-access-7pq5p\") pod \"marketplace-operator-79b997595-4fsn7\" (UID: \"61203368-9fce-4808-ae90-b4a955f5f893\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.577998 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.833212 4763 generic.go:334] "Generic (PLEG): container finished" podID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" containerID="7ef2e01001160daffe9d73462285a7c68140d85927beaed547681090d459b02c" exitCode=0 Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.833540 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdc4f" event={"ID":"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d","Type":"ContainerDied","Data":"7ef2e01001160daffe9d73462285a7c68140d85927beaed547681090d459b02c"} Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.836864 4763 generic.go:334] "Generic (PLEG): container finished" podID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" containerID="48549c600a5e9b7f352150e77549727de127abfa515b10fa43b3f2aa37be8cc3" exitCode=0 Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.836931 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rb2sc" event={"ID":"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf","Type":"ContainerDied","Data":"48549c600a5e9b7f352150e77549727de127abfa515b10fa43b3f2aa37be8cc3"} Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.838749 4763 generic.go:334] "Generic (PLEG): container finished" podID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerID="c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259" exitCode=0 Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.838796 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7hj6" event={"ID":"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff","Type":"ContainerDied","Data":"c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259"} Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.839866 4763 generic.go:334] "Generic (PLEG): container finished" podID="c726fd5f-7588-4b80-843b-b9f864be53ea" 
containerID="7544640d9b3b579addd91de0445aa2a4c9ad825fd05fd6faad88d2a330f05447" exitCode=0 Dec 06 08:14:18 crc kubenswrapper[4763]: I1206 08:14:18.839891 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" event={"ID":"c726fd5f-7588-4b80-843b-b9f864be53ea","Type":"ContainerDied","Data":"7544640d9b3b579addd91de0445aa2a4c9ad825fd05fd6faad88d2a330f05447"} Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.050425 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.153673 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rb2sc"] Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.186010 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtmss\" (UniqueName: \"kubernetes.io/projected/c726fd5f-7588-4b80-843b-b9f864be53ea-kube-api-access-gtmss\") pod \"c726fd5f-7588-4b80-843b-b9f864be53ea\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.186071 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-trusted-ca\") pod \"c726fd5f-7588-4b80-843b-b9f864be53ea\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.186127 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-operator-metrics\") pod \"c726fd5f-7588-4b80-843b-b9f864be53ea\" (UID: \"c726fd5f-7588-4b80-843b-b9f864be53ea\") " Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.187237 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "c726fd5f-7588-4b80-843b-b9f864be53ea" (UID: "c726fd5f-7588-4b80-843b-b9f864be53ea"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.191525 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "c726fd5f-7588-4b80-843b-b9f864be53ea" (UID: "c726fd5f-7588-4b80-843b-b9f864be53ea"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.191726 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c726fd5f-7588-4b80-843b-b9f864be53ea-kube-api-access-gtmss" (OuterVolumeSpecName: "kube-api-access-gtmss") pod "c726fd5f-7588-4b80-843b-b9f864be53ea" (UID: "c726fd5f-7588-4b80-843b-b9f864be53ea"). InnerVolumeSpecName "kube-api-access-gtmss". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.288492 4763 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.288525 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtmss\" (UniqueName: \"kubernetes.io/projected/c726fd5f-7588-4b80-843b-b9f864be53ea-kube-api-access-gtmss\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.288539 4763 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c726fd5f-7588-4b80-843b-b9f864be53ea-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.848530 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" event={"ID":"c726fd5f-7588-4b80-843b-b9f864be53ea","Type":"ContainerDied","Data":"5a3bd3222a71fa9597dab46fcbc25d1acbe3dd84c8b42f04e2355692a0bf603e"} Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.848597 4763 scope.go:117] "RemoveContainer" containerID="7544640d9b3b579addd91de0445aa2a4c9ad825fd05fd6faad88d2a330f05447" Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.848708 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gxwtv" Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.852883 4763 generic.go:334] "Generic (PLEG): container finished" podID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerID="6ad459f89c6daaca4d156837f9f29cdd273bffd3d92de05ff8883cc9c5daf3d5" exitCode=0 Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.853436 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92drr" event={"ID":"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d","Type":"ContainerDied","Data":"6ad459f89c6daaca4d156837f9f29cdd273bffd3d92de05ff8883cc9c5daf3d5"} Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.893622 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gxwtv"] Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.898147 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gxwtv"] Dec 06 08:14:19 crc kubenswrapper[4763]: I1206 08:14:19.901920 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lrzmx"] Dec 06 08:14:21 crc kubenswrapper[4763]: I1206 08:14:21.728658 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c726fd5f-7588-4b80-843b-b9f864be53ea" path="/var/lib/kubelet/pods/c726fd5f-7588-4b80-843b-b9f864be53ea/volumes" Dec 06 08:14:23 crc kubenswrapper[4763]: E1206 08:14:23.303245 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259 is running failed: container process not found" containerID="c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259" cmd=["grpc_health_probe","-addr=:50051"] Dec 06 08:14:23 crc kubenswrapper[4763]: E1206 08:14:23.303602 4763 log.go:32] "ExecSync cmd 
from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259 is running failed: container process not found" containerID="c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259" cmd=["grpc_health_probe","-addr=:50051"] Dec 06 08:14:23 crc kubenswrapper[4763]: E1206 08:14:23.304133 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259 is running failed: container process not found" containerID="c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259" cmd=["grpc_health_probe","-addr=:50051"] Dec 06 08:14:23 crc kubenswrapper[4763]: E1206 08:14:23.304165 4763 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-p7hj6" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerName="registry-server" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.552849 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.561252 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.566169 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.583053 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.644488 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-catalog-content\") pod \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.644766 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frnq9\" (UniqueName: \"kubernetes.io/projected/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-kube-api-access-frnq9\") pod \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.644859 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-catalog-content\") pod \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.644987 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-utilities\") pod \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.645111 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-utilities\") pod \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.645195 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-catalog-content\") pod \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.645278 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdwzr\" (UniqueName: \"kubernetes.io/projected/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-kube-api-access-mdwzr\") pod \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.645365 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-utilities\") pod \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\" (UID: \"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.645468 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-catalog-content\") pod \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\" (UID: \"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.645562 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-utilities\") pod 
\"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.645643 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwptg\" (UniqueName: \"kubernetes.io/projected/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-kube-api-access-pwptg\") pod \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\" (UID: \"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.645726 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9r24\" (UniqueName: \"kubernetes.io/projected/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-kube-api-access-d9r24\") pod \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\" (UID: \"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d\") " Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.648484 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-utilities" (OuterVolumeSpecName: "utilities") pod "411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" (UID: "411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.651850 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-kube-api-access-pwptg" (OuterVolumeSpecName: "kube-api-access-pwptg") pod "411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" (UID: "411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d"). InnerVolumeSpecName "kube-api-access-pwptg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.652243 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-kube-api-access-d9r24" (OuterVolumeSpecName: "kube-api-access-d9r24") pod "ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" (UID: "ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d"). InnerVolumeSpecName "kube-api-access-d9r24". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.652662 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-utilities" (OuterVolumeSpecName: "utilities") pod "ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" (UID: "ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.656945 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-kube-api-access-frnq9" (OuterVolumeSpecName: "kube-api-access-frnq9") pod "0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" (UID: "0dcae7a8-a85b-4cb4-89d5-39f169eb2dff"). InnerVolumeSpecName "kube-api-access-frnq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.657397 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-utilities" (OuterVolumeSpecName: "utilities") pod "0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" (UID: "0dcae7a8-a85b-4cb4-89d5-39f169eb2dff"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.658063 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-utilities" (OuterVolumeSpecName: "utilities") pod "cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" (UID: "cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.746972 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.747028 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.747038 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.747046 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.747055 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwptg\" (UniqueName: \"kubernetes.io/projected/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-kube-api-access-pwptg\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.747066 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9r24\" (UniqueName: \"kubernetes.io/projected/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-kube-api-access-d9r24\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.747074 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frnq9\" (UniqueName: \"kubernetes.io/projected/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-kube-api-access-frnq9\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.839746 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-kube-api-access-mdwzr" (OuterVolumeSpecName: "kube-api-access-mdwzr") pod "cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" (UID: "cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf"). InnerVolumeSpecName "kube-api-access-mdwzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.847763 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdwzr\" (UniqueName: \"kubernetes.io/projected/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-kube-api-access-mdwzr\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.859797 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" (UID: "ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.863413 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" (UID: "cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.875922 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7hj6" event={"ID":"0dcae7a8-a85b-4cb4-89d5-39f169eb2dff","Type":"ContainerDied","Data":"b089e2ad53b0ade5914cd18b580ad82eddb6e0578b08b480107a086f8b93f9ad"} Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.876025 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p7hj6" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.900553 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rb2sc" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.901035 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rb2sc" event={"ID":"cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf","Type":"ContainerDied","Data":"83b428752683031cb1aff83723bba67a6aabb50cf01080e490d40c54d80551d7"} Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.904668 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" (UID: "0dcae7a8-a85b-4cb4-89d5-39f169eb2dff"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.909493 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdc4f" event={"ID":"ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d","Type":"ContainerDied","Data":"29f0640c6e741a8a1e0200b8b39b228690999e97849dd6048d64ae315476434b"} Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.909602 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdc4f" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.933702 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92drr" event={"ID":"411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d","Type":"ContainerDied","Data":"7e1588f327492797d74dfc6c0da2a3195f53489d6a0e54c2d01685057a8be7a4"} Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.933803 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-92drr" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.948647 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.948683 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.948696 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.958538 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdc4f"] Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.960775 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdc4f"] Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.973359 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rb2sc"] Dec 06 08:14:23 crc kubenswrapper[4763]: I1206 08:14:23.986171 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rb2sc"] Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.206413 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p7hj6"] Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.214588 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-p7hj6"] Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.286994 4763 scope.go:117] "RemoveContainer" containerID="c43b045e123627e992b2291df0da602067ecff6cb22119cec0ee1d26aa7e3259" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.570225 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" (UID: "411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.661869 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.864744 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-92drr"] Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.868485 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-92drr"] Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.958016 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kg8kh"] Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.958484 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.958571 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.958674 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerName="extract-utilities" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.958759 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerName="extract-utilities" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.958842 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.958935 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.959009 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerName="extract-utilities" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.959064 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerName="extract-utilities" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.959130 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.959184 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.959256 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" containerName="extract-content" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.959330 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" containerName="extract-content" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.959407 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" containerName="extract-utilities" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.959489 4763 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" containerName="extract-utilities" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.959598 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerName="extract-content" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.959678 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerName="extract-content" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.959760 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" containerName="extract-utilities" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.959842 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" containerName="extract-utilities" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.959947 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" containerName="extract-content" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.960011 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" containerName="extract-content" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.960074 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.960140 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.960197 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c726fd5f-7588-4b80-843b-b9f864be53ea" containerName="marketplace-operator" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.960250 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="c726fd5f-7588-4b80-843b-b9f864be53ea" containerName="marketplace-operator" Dec 06 08:14:24 crc kubenswrapper[4763]: E1206 08:14:24.960310 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerName="extract-content" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.960368 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerName="extract-content" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.960528 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="c726fd5f-7588-4b80-843b-b9f864be53ea" containerName="marketplace-operator" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.960691 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.960756 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.960812 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.960869 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" containerName="registry-server" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 
08:14:24.961642 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.964654 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 06 08:14:24 crc kubenswrapper[4763]: I1206 08:14:24.968073 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg8kh"] Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.066101 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt27b\" (UniqueName: \"kubernetes.io/projected/a6f5ed36-5b72-45e8-8aea-5715275f5f41-kube-api-access-kt27b\") pod \"redhat-marketplace-kg8kh\" (UID: \"a6f5ed36-5b72-45e8-8aea-5715275f5f41\") " pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.066151 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f5ed36-5b72-45e8-8aea-5715275f5f41-utilities\") pod \"redhat-marketplace-kg8kh\" (UID: \"a6f5ed36-5b72-45e8-8aea-5715275f5f41\") " pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.066185 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f5ed36-5b72-45e8-8aea-5715275f5f41-catalog-content\") pod \"redhat-marketplace-kg8kh\" (UID: \"a6f5ed36-5b72-45e8-8aea-5715275f5f41\") " pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.167045 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt27b\" (UniqueName: \"kubernetes.io/projected/a6f5ed36-5b72-45e8-8aea-5715275f5f41-kube-api-access-kt27b\") pod \"redhat-marketplace-kg8kh\" (UID: \"a6f5ed36-5b72-45e8-8aea-5715275f5f41\") " pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.167139 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f5ed36-5b72-45e8-8aea-5715275f5f41-utilities\") pod \"redhat-marketplace-kg8kh\" (UID: \"a6f5ed36-5b72-45e8-8aea-5715275f5f41\") " pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.167200 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f5ed36-5b72-45e8-8aea-5715275f5f41-catalog-content\") pod \"redhat-marketplace-kg8kh\" (UID: \"a6f5ed36-5b72-45e8-8aea-5715275f5f41\") " pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.167711 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f5ed36-5b72-45e8-8aea-5715275f5f41-utilities\") pod \"redhat-marketplace-kg8kh\" (UID: \"a6f5ed36-5b72-45e8-8aea-5715275f5f41\") " pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.167818 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/a6f5ed36-5b72-45e8-8aea-5715275f5f41-catalog-content\") pod \"redhat-marketplace-kg8kh\" (UID: \"a6f5ed36-5b72-45e8-8aea-5715275f5f41\") " pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.182394 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt27b\" (UniqueName: \"kubernetes.io/projected/a6f5ed36-5b72-45e8-8aea-5715275f5f41-kube-api-access-kt27b\") pod \"redhat-marketplace-kg8kh\" (UID: \"a6f5ed36-5b72-45e8-8aea-5715275f5f41\") " pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.281824 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.732089 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dcae7a8-a85b-4cb4-89d5-39f169eb2dff" path="/var/lib/kubelet/pods/0dcae7a8-a85b-4cb4-89d5-39f169eb2dff/volumes" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.733394 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d" path="/var/lib/kubelet/pods/411a9cc4-5c2c-4060-8a7d-f07b95ee6b0d/volumes" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.734716 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf" path="/var/lib/kubelet/pods/cc4bb178-0642-4fd9-8a54-1cfe3de5c3bf/volumes" Dec 06 08:14:25 crc kubenswrapper[4763]: I1206 08:14:25.737090 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d" path="/var/lib/kubelet/pods/ef3a25db-4a5f-4fb2-88c3-6dfd2ce24d6d/volumes" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.348303 4763 scope.go:117] "RemoveContainer" containerID="3dcc1aeca2decdce6b3febe04965607767fb40d4d75a087046c11dc9957c8caf" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.400574 4763 scope.go:117] "RemoveContainer" containerID="3d2968cf875443dd4c64b5c639c3e62d3cb00cf67459f6cc7401e358a359b810" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.438742 4763 scope.go:117] "RemoveContainer" containerID="48549c600a5e9b7f352150e77549727de127abfa515b10fa43b3f2aa37be8cc3" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.483812 4763 scope.go:117] "RemoveContainer" containerID="81513c65f4d1e32374bc191787bb3aa7b8c727dea8f10657186f441129c66b2d" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.523793 4763 scope.go:117] "RemoveContainer" containerID="86473714f4ce3e0c0546d3efd9d2a4c483b250ae2cfc71b869c9dd2d74212843" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.552144 4763 scope.go:117] "RemoveContainer" containerID="7ef2e01001160daffe9d73462285a7c68140d85927beaed547681090d459b02c" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.568749 4763 scope.go:117] "RemoveContainer" containerID="5fa126529ebfd9f8d654fdb4163dd9313138d45dbfe91ba77b7f2932c6bbe253" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.591029 4763 scope.go:117] "RemoveContainer" containerID="7714a8a6b79709126f364f0d62c56b1d3452f8a8fba6dcbd2a886d213e6d1539" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.608843 4763 scope.go:117] "RemoveContainer" containerID="6ad459f89c6daaca4d156837f9f29cdd273bffd3d92de05ff8883cc9c5daf3d5" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.622723 4763 scope.go:117] "RemoveContainer" 
containerID="c764195b9e13327744de1da5d6c325b96e398b0064358e355b484b95d5879006" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.639649 4763 scope.go:117] "RemoveContainer" containerID="ecd4554feb413e934a9bae078f649478d2be2b84816a89af93d5b1ce8db385d7" Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.718444 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4fsn7"] Dec 06 08:14:27 crc kubenswrapper[4763]: W1206 08:14:27.729975 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61203368_9fce_4808_ae90_b4a955f5f893.slice/crio-8c9074827194a3cc6cea2747b475e12127a9eced6bfbc5f730fe34f13f9313f0 WatchSource:0}: Error finding container 8c9074827194a3cc6cea2747b475e12127a9eced6bfbc5f730fe34f13f9313f0: Status 404 returned error can't find the container with id 8c9074827194a3cc6cea2747b475e12127a9eced6bfbc5f730fe34f13f9313f0 Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.795589 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hsbkg"] Dec 06 08:14:27 crc kubenswrapper[4763]: W1206 08:14:27.807191 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfdc178a0_0a78_4b8b_85ba_32167fdbcf89.slice/crio-cb1b41921fc1e071347493bff1ac6a42caedf58c7024b3905aba7747822e6920 WatchSource:0}: Error finding container cb1b41921fc1e071347493bff1ac6a42caedf58c7024b3905aba7747822e6920: Status 404 returned error can't find the container with id cb1b41921fc1e071347493bff1ac6a42caedf58c7024b3905aba7747822e6920 Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.855822 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg8kh"] Dec 06 08:14:27 crc kubenswrapper[4763]: W1206 08:14:27.863947 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6f5ed36_5b72_45e8_8aea_5715275f5f41.slice/crio-2d09fdfb3fcbe3923815dea0277075f2ddf909362d39f1f1509a9edbfb1a8817 WatchSource:0}: Error finding container 2d09fdfb3fcbe3923815dea0277075f2ddf909362d39f1f1509a9edbfb1a8817: Status 404 returned error can't find the container with id 2d09fdfb3fcbe3923815dea0277075f2ddf909362d39f1f1509a9edbfb1a8817 Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.957078 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" event={"ID":"fdc178a0-0a78-4b8b-85ba-32167fdbcf89","Type":"ContainerStarted","Data":"cb1b41921fc1e071347493bff1ac6a42caedf58c7024b3905aba7747822e6920"} Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.962295 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-glwdn" event={"ID":"33bff8df-dbde-4ad7-8edc-18b8848bd87e","Type":"ContainerStarted","Data":"143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8"} Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.964614 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" event={"ID":"61203368-9fce-4808-ae90-b4a955f5f893","Type":"ContainerStarted","Data":"8c9074827194a3cc6cea2747b475e12127a9eced6bfbc5f730fe34f13f9313f0"} Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.966032 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-kg8kh" event={"ID":"a6f5ed36-5b72-45e8-8aea-5715275f5f41","Type":"ContainerStarted","Data":"2d09fdfb3fcbe3923815dea0277075f2ddf909362d39f1f1509a9edbfb1a8817"} Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.968451 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gntcq" event={"ID":"49651d40-4e4d-442a-9421-ed157e45ce24","Type":"ContainerStarted","Data":"d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9"} Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.968544 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gntcq" podUID="49651d40-4e4d-442a-9421-ed157e45ce24" containerName="registry-server" containerID="cri-o://d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9" gracePeriod=30 Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.972550 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssrkx" event={"ID":"9cf352a5-4869-4b34-951c-ccd1c1da1fb2","Type":"ContainerStarted","Data":"ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f"} Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.972636 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ssrkx" podUID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" containerName="registry-server" containerID="cri-o://ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f" gracePeriod=30 Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.978390 4763 generic.go:334] "Generic (PLEG): container finished" podID="20cce31d-dc9e-4669-830a-2663bde5c655" containerID="e4bae7b319d6bd158a69aaa4a839d800a52be7ad9ca662a24ee86b45718820d0" exitCode=0 Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.978451 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7gjl5" event={"ID":"20cce31d-dc9e-4669-830a-2663bde5c655","Type":"ContainerDied","Data":"e4bae7b319d6bd158a69aaa4a839d800a52be7ad9ca662a24ee86b45718820d0"} Dec 06 08:14:27 crc kubenswrapper[4763]: I1206 08:14:27.996005 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gntcq" podStartSLOduration=7.730268591 podStartE2EDuration="1m45.99598818s" podCreationTimestamp="2025-12-06 08:12:42 +0000 UTC" firstStartedPulling="2025-12-06 08:12:49.126538005 +0000 UTC m=+51.702243043" lastFinishedPulling="2025-12-06 08:14:27.392257594 +0000 UTC m=+149.967962632" observedRunningTime="2025-12-06 08:14:27.995760524 +0000 UTC m=+150.571465572" watchObservedRunningTime="2025-12-06 08:14:27.99598818 +0000 UTC m=+150.571693218" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.022652 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ssrkx" podStartSLOduration=4.878999499 podStartE2EDuration="1m43.022635631s" podCreationTimestamp="2025-12-06 08:12:45 +0000 UTC" firstStartedPulling="2025-12-06 08:12:49.191857519 +0000 UTC m=+51.767562557" lastFinishedPulling="2025-12-06 08:14:27.335493651 +0000 UTC m=+149.911198689" observedRunningTime="2025-12-06 08:14:28.017706529 +0000 UTC m=+150.593411577" watchObservedRunningTime="2025-12-06 08:14:28.022635631 +0000 UTC m=+150.598340669" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.175266 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.308694 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pcdd\" (UniqueName: \"kubernetes.io/projected/33bff8df-dbde-4ad7-8edc-18b8848bd87e-kube-api-access-6pcdd\") pod \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.308821 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-catalog-content\") pod \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.308865 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-utilities\") pod \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\" (UID: \"33bff8df-dbde-4ad7-8edc-18b8848bd87e\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.309652 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-utilities" (OuterVolumeSpecName: "utilities") pod "33bff8df-dbde-4ad7-8edc-18b8848bd87e" (UID: "33bff8df-dbde-4ad7-8edc-18b8848bd87e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.314353 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33bff8df-dbde-4ad7-8edc-18b8848bd87e-kube-api-access-6pcdd" (OuterVolumeSpecName: "kube-api-access-6pcdd") pod "33bff8df-dbde-4ad7-8edc-18b8848bd87e" (UID: "33bff8df-dbde-4ad7-8edc-18b8848bd87e"). InnerVolumeSpecName "kube-api-access-6pcdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.349860 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.356123 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gntcq_49651d40-4e4d-442a-9421-ed157e45ce24/registry-server/0.log" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.356956 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.364958 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "33bff8df-dbde-4ad7-8edc-18b8848bd87e" (UID: "33bff8df-dbde-4ad7-8edc-18b8848bd87e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.409575 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-catalog-content\") pod \"20cce31d-dc9e-4669-830a-2663bde5c655\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.409636 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-utilities\") pod \"20cce31d-dc9e-4669-830a-2663bde5c655\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.409701 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bc2f6\" (UniqueName: \"kubernetes.io/projected/20cce31d-dc9e-4669-830a-2663bde5c655-kube-api-access-bc2f6\") pod \"20cce31d-dc9e-4669-830a-2663bde5c655\" (UID: \"20cce31d-dc9e-4669-830a-2663bde5c655\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.410049 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.410071 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33bff8df-dbde-4ad7-8edc-18b8848bd87e-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.410083 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pcdd\" (UniqueName: \"kubernetes.io/projected/33bff8df-dbde-4ad7-8edc-18b8848bd87e-kube-api-access-6pcdd\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.412706 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20cce31d-dc9e-4669-830a-2663bde5c655-kube-api-access-bc2f6" (OuterVolumeSpecName: "kube-api-access-bc2f6") pod "20cce31d-dc9e-4669-830a-2663bde5c655" (UID: "20cce31d-dc9e-4669-830a-2663bde5c655"). InnerVolumeSpecName "kube-api-access-bc2f6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.413528 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-utilities" (OuterVolumeSpecName: "utilities") pod "20cce31d-dc9e-4669-830a-2663bde5c655" (UID: "20cce31d-dc9e-4669-830a-2663bde5c655"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.462712 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "20cce31d-dc9e-4669-830a-2663bde5c655" (UID: "20cce31d-dc9e-4669-830a-2663bde5c655"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.510821 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-utilities\") pod \"49651d40-4e4d-442a-9421-ed157e45ce24\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.510914 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75d7t\" (UniqueName: \"kubernetes.io/projected/49651d40-4e4d-442a-9421-ed157e45ce24-kube-api-access-75d7t\") pod \"49651d40-4e4d-442a-9421-ed157e45ce24\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.510979 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-catalog-content\") pod \"49651d40-4e4d-442a-9421-ed157e45ce24\" (UID: \"49651d40-4e4d-442a-9421-ed157e45ce24\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.511203 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.511220 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20cce31d-dc9e-4669-830a-2663bde5c655-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.511229 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bc2f6\" (UniqueName: \"kubernetes.io/projected/20cce31d-dc9e-4669-830a-2663bde5c655-kube-api-access-bc2f6\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.513609 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-utilities" (OuterVolumeSpecName: "utilities") pod "49651d40-4e4d-442a-9421-ed157e45ce24" (UID: "49651d40-4e4d-442a-9421-ed157e45ce24"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.515408 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49651d40-4e4d-442a-9421-ed157e45ce24-kube-api-access-75d7t" (OuterVolumeSpecName: "kube-api-access-75d7t") pod "49651d40-4e4d-442a-9421-ed157e45ce24" (UID: "49651d40-4e4d-442a-9421-ed157e45ce24"). InnerVolumeSpecName "kube-api-access-75d7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.561351 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "49651d40-4e4d-442a-9421-ed157e45ce24" (UID: "49651d40-4e4d-442a-9421-ed157e45ce24"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.612549 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.612598 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49651d40-4e4d-442a-9421-ed157e45ce24-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.612619 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75d7t\" (UniqueName: \"kubernetes.io/projected/49651d40-4e4d-442a-9421-ed157e45ce24-kube-api-access-75d7t\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.745137 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ssrkx_9cf352a5-4869-4b34-951c-ccd1c1da1fb2/registry-server/0.log" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.745797 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.816129 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-utilities\") pod \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.816212 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-catalog-content\") pod \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.816244 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2skqf\" (UniqueName: \"kubernetes.io/projected/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-kube-api-access-2skqf\") pod \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\" (UID: \"9cf352a5-4869-4b34-951c-ccd1c1da1fb2\") " Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.817590 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-utilities" (OuterVolumeSpecName: "utilities") pod "9cf352a5-4869-4b34-951c-ccd1c1da1fb2" (UID: "9cf352a5-4869-4b34-951c-ccd1c1da1fb2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.820018 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-kube-api-access-2skqf" (OuterVolumeSpecName: "kube-api-access-2skqf") pod "9cf352a5-4869-4b34-951c-ccd1c1da1fb2" (UID: "9cf352a5-4869-4b34-951c-ccd1c1da1fb2"). InnerVolumeSpecName "kube-api-access-2skqf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.914143 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9cf352a5-4869-4b34-951c-ccd1c1da1fb2" (UID: "9cf352a5-4869-4b34-951c-ccd1c1da1fb2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.918103 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.918133 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.918148 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2skqf\" (UniqueName: \"kubernetes.io/projected/9cf352a5-4869-4b34-951c-ccd1c1da1fb2-kube-api-access-2skqf\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.986561 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" event={"ID":"61203368-9fce-4808-ae90-b4a955f5f893","Type":"ContainerStarted","Data":"147d1c16acaed36275b311a16c4aab07fa2bd81d9044065e2ae5c5cff19202b0"} Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.987527 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.990714 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7gjl5" event={"ID":"20cce31d-dc9e-4669-830a-2663bde5c655","Type":"ContainerDied","Data":"285e1e7861de1b9a011361f914ffa69f946fa599e52d2dbda65214a8484cf7cf"} Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.990772 4763 scope.go:117] "RemoveContainer" containerID="e4bae7b319d6bd158a69aaa4a839d800a52be7ad9ca662a24ee86b45718820d0" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.990885 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7gjl5" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.990949 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.993853 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gntcq_49651d40-4e4d-442a-9421-ed157e45ce24/registry-server/0.log" Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.995728 4763 generic.go:334] "Generic (PLEG): container finished" podID="49651d40-4e4d-442a-9421-ed157e45ce24" containerID="d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9" exitCode=1 Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.995785 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gntcq" event={"ID":"49651d40-4e4d-442a-9421-ed157e45ce24","Type":"ContainerDied","Data":"d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9"} Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.995814 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gntcq" event={"ID":"49651d40-4e4d-442a-9421-ed157e45ce24","Type":"ContainerDied","Data":"6175e615fb141ca87af51bebcc09c61efdc1d1913ffe3856d5da28a95b6b7f6f"} Dec 06 08:14:28 crc kubenswrapper[4763]: I1206 08:14:28.995863 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gntcq" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.005411 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-4fsn7" podStartSLOduration=11.005390473 podStartE2EDuration="11.005390473s" podCreationTimestamp="2025-12-06 08:14:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:14:29.00289176 +0000 UTC m=+151.578596798" watchObservedRunningTime="2025-12-06 08:14:29.005390473 +0000 UTC m=+151.581095511" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.008463 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" event={"ID":"fdc178a0-0a78-4b8b-85ba-32167fdbcf89","Type":"ContainerStarted","Data":"496e6117ef8826b594597772004d5274bacb984e20ab125ee999d3d4397a39ec"} Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.011051 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.011185 4763 scope.go:117] "RemoveContainer" containerID="6e2ca18f0320f75d3e3f80abaa550eb633d55e5783cf40a29263f5ad9cb25055" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.013241 4763 generic.go:334] "Generic (PLEG): container finished" podID="33bff8df-dbde-4ad7-8edc-18b8848bd87e" containerID="143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8" exitCode=0 Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.013369 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-glwdn" event={"ID":"33bff8df-dbde-4ad7-8edc-18b8848bd87e","Type":"ContainerDied","Data":"143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8"} Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.013400 4763 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-glwdn" event={"ID":"33bff8df-dbde-4ad7-8edc-18b8848bd87e","Type":"ContainerDied","Data":"e19ac4749f3da2571409810f1459ec3e0ee5664b00907412e1333bc5c2be4f5b"} Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.014744 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ssrkx_9cf352a5-4869-4b34-951c-ccd1c1da1fb2/registry-server/0.log" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.015556 4763 generic.go:334] "Generic (PLEG): container finished" podID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" containerID="ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f" exitCode=1 Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.015620 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssrkx" event={"ID":"9cf352a5-4869-4b34-951c-ccd1c1da1fb2","Type":"ContainerDied","Data":"ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f"} Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.015645 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssrkx" event={"ID":"9cf352a5-4869-4b34-951c-ccd1c1da1fb2","Type":"ContainerDied","Data":"f4c32c38bdd0f56773ddc8fdb762a14006109d96f63927a06263ec4cc3eead89"} Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.015725 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ssrkx" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.016245 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-glwdn" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.018207 4763 generic.go:334] "Generic (PLEG): container finished" podID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" containerID="34784cae6c8902ab934aa3a7421f6ba08034e371a993071d072a378a2b2fe836" exitCode=0 Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.018241 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg8kh" event={"ID":"a6f5ed36-5b72-45e8-8aea-5715275f5f41","Type":"ContainerDied","Data":"34784cae6c8902ab934aa3a7421f6ba08034e371a993071d072a378a2b2fe836"} Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.045641 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" podStartSLOduration=17.045617137 podStartE2EDuration="17.045617137s" podCreationTimestamp="2025-12-06 08:14:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:14:29.041753574 +0000 UTC m=+151.617458622" watchObservedRunningTime="2025-12-06 08:14:29.045617137 +0000 UTC m=+151.621322175" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.076646 4763 scope.go:117] "RemoveContainer" containerID="d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.093135 4763 scope.go:117] "RemoveContainer" containerID="8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.104367 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7gjl5"] Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.108747 4763 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7gjl5"] Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.112285 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gntcq"] Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.115685 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gntcq"] Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.117475 4763 scope.go:117] "RemoveContainer" containerID="b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.138104 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-glwdn"] Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.138278 4763 scope.go:117] "RemoveContainer" containerID="d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9" Dec 06 08:14:29 crc kubenswrapper[4763]: E1206 08:14:29.138803 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9\": container with ID starting with d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9 not found: ID does not exist" containerID="d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.138981 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9"} err="failed to get container status \"d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9\": rpc error: code = NotFound desc = could not find container \"d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9\": container with ID starting with d025d86bc5c57d26983de6a5860d3429b014ed912ebcfb7d07cc9985ac24a6b9 not found: ID does not exist" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.139039 4763 scope.go:117] "RemoveContainer" containerID="8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061" Dec 06 08:14:29 crc kubenswrapper[4763]: E1206 08:14:29.139410 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061\": container with ID starting with 8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061 not found: ID does not exist" containerID="8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.139441 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061"} err="failed to get container status \"8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061\": rpc error: code = NotFound desc = could not find container \"8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061\": container with ID starting with 8c3840af680cac6eeda9f6fa060cd597d747ccf4af89bb76e18e1dd7acad7061 not found: ID does not exist" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.139460 4763 scope.go:117] "RemoveContainer" containerID="b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5" Dec 06 08:14:29 crc kubenswrapper[4763]: E1206 08:14:29.139672 4763 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5\": container with ID starting with b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5 not found: ID does not exist" containerID="b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.139730 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5"} err="failed to get container status \"b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5\": rpc error: code = NotFound desc = could not find container \"b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5\": container with ID starting with b27cf5a3dfa530f4b9522f074e47f1b9f60905d545a22c5926ef9f3eb2bc05f5 not found: ID does not exist" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.139750 4763 scope.go:117] "RemoveContainer" containerID="143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.143873 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-glwdn"] Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.153436 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ssrkx"] Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.157119 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ssrkx"] Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.157378 4763 scope.go:117] "RemoveContainer" containerID="9ca2df7a57b699fd688192ebc30029c598670a20ae005e1d98c380301bb6ab08" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.169533 4763 scope.go:117] "RemoveContainer" containerID="143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8" Dec 06 08:14:29 crc kubenswrapper[4763]: E1206 08:14:29.169922 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8\": container with ID starting with 143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8 not found: ID does not exist" containerID="143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.169955 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8"} err="failed to get container status \"143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8\": rpc error: code = NotFound desc = could not find container \"143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8\": container with ID starting with 143153030b60ff960cea3dc25e12b1ca9b4478bf913ef5d929eb5e273e046da8 not found: ID does not exist" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.169986 4763 scope.go:117] "RemoveContainer" containerID="9ca2df7a57b699fd688192ebc30029c598670a20ae005e1d98c380301bb6ab08" Dec 06 08:14:29 crc kubenswrapper[4763]: E1206 08:14:29.170262 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ca2df7a57b699fd688192ebc30029c598670a20ae005e1d98c380301bb6ab08\": container with ID starting with 
9ca2df7a57b699fd688192ebc30029c598670a20ae005e1d98c380301bb6ab08 not found: ID does not exist" containerID="9ca2df7a57b699fd688192ebc30029c598670a20ae005e1d98c380301bb6ab08" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.170292 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ca2df7a57b699fd688192ebc30029c598670a20ae005e1d98c380301bb6ab08"} err="failed to get container status \"9ca2df7a57b699fd688192ebc30029c598670a20ae005e1d98c380301bb6ab08\": rpc error: code = NotFound desc = could not find container \"9ca2df7a57b699fd688192ebc30029c598670a20ae005e1d98c380301bb6ab08\": container with ID starting with 9ca2df7a57b699fd688192ebc30029c598670a20ae005e1d98c380301bb6ab08 not found: ID does not exist" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.170305 4763 scope.go:117] "RemoveContainer" containerID="ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.181338 4763 scope.go:117] "RemoveContainer" containerID="c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.193221 4763 scope.go:117] "RemoveContainer" containerID="3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.204491 4763 scope.go:117] "RemoveContainer" containerID="ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f" Dec 06 08:14:29 crc kubenswrapper[4763]: E1206 08:14:29.204876 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f\": container with ID starting with ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f not found: ID does not exist" containerID="ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.204929 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f"} err="failed to get container status \"ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f\": rpc error: code = NotFound desc = could not find container \"ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f\": container with ID starting with ee881068d69132ad9067e96d794355cfb62bf12768901b25d8bc4d1d8bd2cb1f not found: ID does not exist" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.204959 4763 scope.go:117] "RemoveContainer" containerID="c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc" Dec 06 08:14:29 crc kubenswrapper[4763]: E1206 08:14:29.205258 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc\": container with ID starting with c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc not found: ID does not exist" containerID="c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.205281 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc"} err="failed to get container status \"c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc\": rpc error: code = NotFound desc 
= could not find container \"c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc\": container with ID starting with c6367e6874bb323a9acca5f7fe6118858b8276674e635dc08a550b20cc593bbc not found: ID does not exist" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.205297 4763 scope.go:117] "RemoveContainer" containerID="3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884" Dec 06 08:14:29 crc kubenswrapper[4763]: E1206 08:14:29.207548 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884\": container with ID starting with 3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884 not found: ID does not exist" containerID="3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.207575 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884"} err="failed to get container status \"3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884\": rpc error: code = NotFound desc = could not find container \"3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884\": container with ID starting with 3f5dd8ab6f20d3cade819b1de99b023ece04a849fa8cbdab4922e81e494ab884 not found: ID does not exist" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.726786 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20cce31d-dc9e-4669-830a-2663bde5c655" path="/var/lib/kubelet/pods/20cce31d-dc9e-4669-830a-2663bde5c655/volumes" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.727684 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33bff8df-dbde-4ad7-8edc-18b8848bd87e" path="/var/lib/kubelet/pods/33bff8df-dbde-4ad7-8edc-18b8848bd87e/volumes" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.728682 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49651d40-4e4d-442a-9421-ed157e45ce24" path="/var/lib/kubelet/pods/49651d40-4e4d-442a-9421-ed157e45ce24/volumes" Dec 06 08:14:29 crc kubenswrapper[4763]: I1206 08:14:29.737052 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" path="/var/lib/kubelet/pods/9cf352a5-4869-4b34-951c-ccd1c1da1fb2/volumes" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.031949 4763 generic.go:334] "Generic (PLEG): container finished" podID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" containerID="209bb6b3e57a985ed248f3360519856e0007185722c15d3099a3895abef6b499" exitCode=0 Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.032037 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg8kh" event={"ID":"a6f5ed36-5b72-45e8-8aea-5715275f5f41","Type":"ContainerDied","Data":"209bb6b3e57a985ed248f3360519856e0007185722c15d3099a3895abef6b499"} Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161097 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kvt7x"] Dec 06 08:14:31 crc kubenswrapper[4763]: E1206 08:14:31.161380 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33bff8df-dbde-4ad7-8edc-18b8848bd87e" containerName="extract-utilities" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161408 4763 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="33bff8df-dbde-4ad7-8edc-18b8848bd87e" containerName="extract-utilities" Dec 06 08:14:31 crc kubenswrapper[4763]: E1206 08:14:31.161432 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33bff8df-dbde-4ad7-8edc-18b8848bd87e" containerName="extract-content" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161444 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="33bff8df-dbde-4ad7-8edc-18b8848bd87e" containerName="extract-content" Dec 06 08:14:31 crc kubenswrapper[4763]: E1206 08:14:31.161463 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" containerName="extract-utilities" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161477 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" containerName="extract-utilities" Dec 06 08:14:31 crc kubenswrapper[4763]: E1206 08:14:31.161494 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20cce31d-dc9e-4669-830a-2663bde5c655" containerName="extract-utilities" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161505 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="20cce31d-dc9e-4669-830a-2663bde5c655" containerName="extract-utilities" Dec 06 08:14:31 crc kubenswrapper[4763]: E1206 08:14:31.161518 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" containerName="extract-content" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161529 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" containerName="extract-content" Dec 06 08:14:31 crc kubenswrapper[4763]: E1206 08:14:31.161549 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49651d40-4e4d-442a-9421-ed157e45ce24" containerName="extract-utilities" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161561 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="49651d40-4e4d-442a-9421-ed157e45ce24" containerName="extract-utilities" Dec 06 08:14:31 crc kubenswrapper[4763]: E1206 08:14:31.161579 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" containerName="registry-server" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161592 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" containerName="registry-server" Dec 06 08:14:31 crc kubenswrapper[4763]: E1206 08:14:31.161605 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49651d40-4e4d-442a-9421-ed157e45ce24" containerName="registry-server" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161617 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="49651d40-4e4d-442a-9421-ed157e45ce24" containerName="registry-server" Dec 06 08:14:31 crc kubenswrapper[4763]: E1206 08:14:31.161643 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20cce31d-dc9e-4669-830a-2663bde5c655" containerName="extract-content" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161655 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="20cce31d-dc9e-4669-830a-2663bde5c655" containerName="extract-content" Dec 06 08:14:31 crc kubenswrapper[4763]: E1206 08:14:31.161678 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49651d40-4e4d-442a-9421-ed157e45ce24" containerName="extract-content" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161691 4763 
state_mem.go:107] "Deleted CPUSet assignment" podUID="49651d40-4e4d-442a-9421-ed157e45ce24" containerName="extract-content" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161846 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="33bff8df-dbde-4ad7-8edc-18b8848bd87e" containerName="extract-content" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161865 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf352a5-4869-4b34-951c-ccd1c1da1fb2" containerName="registry-server" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161885 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="49651d40-4e4d-442a-9421-ed157e45ce24" containerName="registry-server" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.161955 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="20cce31d-dc9e-4669-830a-2663bde5c655" containerName="extract-content" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.163086 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.165107 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.176330 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kvt7x"] Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.249048 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31406d5a-2fb1-4c58-a333-8decda95ca2a-utilities\") pod \"certified-operators-kvt7x\" (UID: \"31406d5a-2fb1-4c58-a333-8decda95ca2a\") " pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.249316 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btqb9\" (UniqueName: \"kubernetes.io/projected/31406d5a-2fb1-4c58-a333-8decda95ca2a-kube-api-access-btqb9\") pod \"certified-operators-kvt7x\" (UID: \"31406d5a-2fb1-4c58-a333-8decda95ca2a\") " pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.249386 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31406d5a-2fb1-4c58-a333-8decda95ca2a-catalog-content\") pod \"certified-operators-kvt7x\" (UID: \"31406d5a-2fb1-4c58-a333-8decda95ca2a\") " pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.350734 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31406d5a-2fb1-4c58-a333-8decda95ca2a-catalog-content\") pod \"certified-operators-kvt7x\" (UID: \"31406d5a-2fb1-4c58-a333-8decda95ca2a\") " pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.350795 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31406d5a-2fb1-4c58-a333-8decda95ca2a-utilities\") pod \"certified-operators-kvt7x\" (UID: \"31406d5a-2fb1-4c58-a333-8decda95ca2a\") " pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:31 crc 
kubenswrapper[4763]: I1206 08:14:31.350866 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btqb9\" (UniqueName: \"kubernetes.io/projected/31406d5a-2fb1-4c58-a333-8decda95ca2a-kube-api-access-btqb9\") pod \"certified-operators-kvt7x\" (UID: \"31406d5a-2fb1-4c58-a333-8decda95ca2a\") " pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.351266 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31406d5a-2fb1-4c58-a333-8decda95ca2a-catalog-content\") pod \"certified-operators-kvt7x\" (UID: \"31406d5a-2fb1-4c58-a333-8decda95ca2a\") " pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.351337 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31406d5a-2fb1-4c58-a333-8decda95ca2a-utilities\") pod \"certified-operators-kvt7x\" (UID: \"31406d5a-2fb1-4c58-a333-8decda95ca2a\") " pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.370391 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btqb9\" (UniqueName: \"kubernetes.io/projected/31406d5a-2fb1-4c58-a333-8decda95ca2a-kube-api-access-btqb9\") pod \"certified-operators-kvt7x\" (UID: \"31406d5a-2fb1-4c58-a333-8decda95ca2a\") " pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.481412 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:31 crc kubenswrapper[4763]: I1206 08:14:31.893593 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kvt7x"] Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.049287 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kvt7x" event={"ID":"31406d5a-2fb1-4c58-a333-8decda95ca2a","Type":"ContainerStarted","Data":"e93944a876cd9551f35333c1226cddf161ce2f13267391e860f58d2e9aa6e34c"} Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.177909 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w6sjn"] Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.179414 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.180562 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w6sjn"] Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.182508 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.265775 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fhxs\" (UniqueName: \"kubernetes.io/projected/7dac1917-a2ac-4485-93ff-011dd58fcab7-kube-api-access-7fhxs\") pod \"redhat-operators-w6sjn\" (UID: \"7dac1917-a2ac-4485-93ff-011dd58fcab7\") " pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.265865 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dac1917-a2ac-4485-93ff-011dd58fcab7-catalog-content\") pod \"redhat-operators-w6sjn\" (UID: \"7dac1917-a2ac-4485-93ff-011dd58fcab7\") " pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.265993 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dac1917-a2ac-4485-93ff-011dd58fcab7-utilities\") pod \"redhat-operators-w6sjn\" (UID: \"7dac1917-a2ac-4485-93ff-011dd58fcab7\") " pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.367233 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dac1917-a2ac-4485-93ff-011dd58fcab7-catalog-content\") pod \"redhat-operators-w6sjn\" (UID: \"7dac1917-a2ac-4485-93ff-011dd58fcab7\") " pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.367317 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dac1917-a2ac-4485-93ff-011dd58fcab7-utilities\") pod \"redhat-operators-w6sjn\" (UID: \"7dac1917-a2ac-4485-93ff-011dd58fcab7\") " pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.367339 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fhxs\" (UniqueName: \"kubernetes.io/projected/7dac1917-a2ac-4485-93ff-011dd58fcab7-kube-api-access-7fhxs\") pod \"redhat-operators-w6sjn\" (UID: \"7dac1917-a2ac-4485-93ff-011dd58fcab7\") " pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.367825 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dac1917-a2ac-4485-93ff-011dd58fcab7-utilities\") pod \"redhat-operators-w6sjn\" (UID: \"7dac1917-a2ac-4485-93ff-011dd58fcab7\") " pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.368397 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dac1917-a2ac-4485-93ff-011dd58fcab7-catalog-content\") pod \"redhat-operators-w6sjn\" (UID: \"7dac1917-a2ac-4485-93ff-011dd58fcab7\") " 
pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.393047 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fhxs\" (UniqueName: \"kubernetes.io/projected/7dac1917-a2ac-4485-93ff-011dd58fcab7-kube-api-access-7fhxs\") pod \"redhat-operators-w6sjn\" (UID: \"7dac1917-a2ac-4485-93ff-011dd58fcab7\") " pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.508566 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:32 crc kubenswrapper[4763]: I1206 08:14:32.938676 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w6sjn"] Dec 06 08:14:32 crc kubenswrapper[4763]: W1206 08:14:32.943928 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7dac1917_a2ac_4485_93ff_011dd58fcab7.slice/crio-e6351bbe3f324638ee88e7fe471dc70db18f8f474b63076ff853fd04e76f00ff WatchSource:0}: Error finding container e6351bbe3f324638ee88e7fe471dc70db18f8f474b63076ff853fd04e76f00ff: Status 404 returned error can't find the container with id e6351bbe3f324638ee88e7fe471dc70db18f8f474b63076ff853fd04e76f00ff Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.055548 4763 generic.go:334] "Generic (PLEG): container finished" podID="31406d5a-2fb1-4c58-a333-8decda95ca2a" containerID="57177e3800b0772f05bb993534f48fe82513b68b8374bf354b6a604bb4c550aa" exitCode=0 Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.055654 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kvt7x" event={"ID":"31406d5a-2fb1-4c58-a333-8decda95ca2a","Type":"ContainerDied","Data":"57177e3800b0772f05bb993534f48fe82513b68b8374bf354b6a604bb4c550aa"} Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.057957 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6sjn" event={"ID":"7dac1917-a2ac-4485-93ff-011dd58fcab7","Type":"ContainerStarted","Data":"e6351bbe3f324638ee88e7fe471dc70db18f8f474b63076ff853fd04e76f00ff"} Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.559458 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-r5rgv"] Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.560842 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.562367 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.569484 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r5rgv"] Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.686178 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlp2d\" (UniqueName: \"kubernetes.io/projected/29279517-9ad6-4afc-9cfb-a895652124ed-kube-api-access-jlp2d\") pod \"community-operators-r5rgv\" (UID: \"29279517-9ad6-4afc-9cfb-a895652124ed\") " pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.686268 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29279517-9ad6-4afc-9cfb-a895652124ed-utilities\") pod \"community-operators-r5rgv\" (UID: \"29279517-9ad6-4afc-9cfb-a895652124ed\") " pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.686295 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29279517-9ad6-4afc-9cfb-a895652124ed-catalog-content\") pod \"community-operators-r5rgv\" (UID: \"29279517-9ad6-4afc-9cfb-a895652124ed\") " pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.787738 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlp2d\" (UniqueName: \"kubernetes.io/projected/29279517-9ad6-4afc-9cfb-a895652124ed-kube-api-access-jlp2d\") pod \"community-operators-r5rgv\" (UID: \"29279517-9ad6-4afc-9cfb-a895652124ed\") " pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.787839 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29279517-9ad6-4afc-9cfb-a895652124ed-utilities\") pod \"community-operators-r5rgv\" (UID: \"29279517-9ad6-4afc-9cfb-a895652124ed\") " pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.787861 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29279517-9ad6-4afc-9cfb-a895652124ed-catalog-content\") pod \"community-operators-r5rgv\" (UID: \"29279517-9ad6-4afc-9cfb-a895652124ed\") " pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.789206 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29279517-9ad6-4afc-9cfb-a895652124ed-utilities\") pod \"community-operators-r5rgv\" (UID: \"29279517-9ad6-4afc-9cfb-a895652124ed\") " pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.789345 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29279517-9ad6-4afc-9cfb-a895652124ed-catalog-content\") pod \"community-operators-r5rgv\" (UID: 
\"29279517-9ad6-4afc-9cfb-a895652124ed\") " pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.809055 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlp2d\" (UniqueName: \"kubernetes.io/projected/29279517-9ad6-4afc-9cfb-a895652124ed-kube-api-access-jlp2d\") pod \"community-operators-r5rgv\" (UID: \"29279517-9ad6-4afc-9cfb-a895652124ed\") " pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:33 crc kubenswrapper[4763]: I1206 08:14:33.877930 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.065523 4763 generic.go:334] "Generic (PLEG): container finished" podID="7dac1917-a2ac-4485-93ff-011dd58fcab7" containerID="725cbaf6a09d1e67879586c8ad9f96617ee5c89f6a552448f09bf0d9d72c2ea8" exitCode=0 Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.065577 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6sjn" event={"ID":"7dac1917-a2ac-4485-93ff-011dd58fcab7","Type":"ContainerDied","Data":"725cbaf6a09d1e67879586c8ad9f96617ee5c89f6a552448f09bf0d9d72c2ea8"} Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.069131 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg8kh" event={"ID":"a6f5ed36-5b72-45e8-8aea-5715275f5f41","Type":"ContainerStarted","Data":"df6648bbd41264e43f202a668571be1e8a28484c1a5d597acb05790840b71b45"} Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.115966 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kg8kh" podStartSLOduration=6.422748247 podStartE2EDuration="10.115946753s" podCreationTimestamp="2025-12-06 08:14:24 +0000 UTC" firstStartedPulling="2025-12-06 08:14:29.068185539 +0000 UTC m=+151.643890577" lastFinishedPulling="2025-12-06 08:14:32.761384055 +0000 UTC m=+155.337089083" observedRunningTime="2025-12-06 08:14:34.113680267 +0000 UTC m=+156.689385305" watchObservedRunningTime="2025-12-06 08:14:34.115946753 +0000 UTC m=+156.691651791" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.266780 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r5rgv"] Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.287173 4763 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.287932 4763 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.287986 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.288198 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493" gracePeriod=15 Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.288711 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e" gracePeriod=15 Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.288731 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241" gracePeriod=15 Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.288781 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838" gracePeriod=15 Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.288788 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8" gracePeriod=15 Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289204 4763 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 06 08:14:34 crc kubenswrapper[4763]: E1206 08:14:34.289362 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289373 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 06 08:14:34 crc kubenswrapper[4763]: E1206 08:14:34.289385 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289392 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 06 08:14:34 crc kubenswrapper[4763]: E1206 08:14:34.289399 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289405 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 06 08:14:34 crc kubenswrapper[4763]: E1206 08:14:34.289414 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-insecure-readyz" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289420 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 06 08:14:34 crc kubenswrapper[4763]: E1206 08:14:34.289429 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289435 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 06 08:14:34 crc kubenswrapper[4763]: E1206 08:14:34.289447 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289452 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289545 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289554 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289560 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289569 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289577 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289587 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 06 08:14:34 crc kubenswrapper[4763]: E1206 08:14:34.289671 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.289677 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.396952 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.397269 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.397289 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.397306 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.397351 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.397383 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.397410 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.397442 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: E1206 08:14:34.409782 4763 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.18:6443: connect: connection refused" event="&Event{ObjectMeta:{community-operators-r5rgv.187e9238487aa0e1 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:community-operators-r5rgv,UID:29279517-9ad6-4afc-9cfb-a895652124ed,APIVersion:v1,ResourceVersion:29396,FieldPath:spec.initContainers{extract-utilities},},Reason:Created,Message:Created container extract-utilities,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-06 08:14:34.407633121 +0000 UTC m=+156.983338159,LastTimestamp:2025-12-06 08:14:34.407633121 +0000 UTC m=+156.983338159,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 
UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498541 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498600 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498627 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498646 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498684 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498687 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498763 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498807 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498837 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 
08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498721 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498893 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498974 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.498997 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.499058 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.499089 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.499124 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.572338 4763 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body= Dec 06 08:14:34 crc kubenswrapper[4763]: I1206 08:14:34.572440 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.073948 4763 generic.go:334] "Generic (PLEG): container finished" 
podID="31406d5a-2fb1-4c58-a333-8decda95ca2a" containerID="6053a7a083e8509412ed7fa932e32e5e7c309e39ed6a2cd5b42b82be86b0e0bb" exitCode=0 Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.074042 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kvt7x" event={"ID":"31406d5a-2fb1-4c58-a333-8decda95ca2a","Type":"ContainerDied","Data":"6053a7a083e8509412ed7fa932e32e5e7c309e39ed6a2cd5b42b82be86b0e0bb"} Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.075083 4763 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.075319 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.076445 4763 generic.go:334] "Generic (PLEG): container finished" podID="29279517-9ad6-4afc-9cfb-a895652124ed" containerID="843d33a6f71c8d61d603104dc1680cbd44abe86a529e7dcc3edb84c4b1529259" exitCode=0 Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.076486 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5rgv" event={"ID":"29279517-9ad6-4afc-9cfb-a895652124ed","Type":"ContainerDied","Data":"843d33a6f71c8d61d603104dc1680cbd44abe86a529e7dcc3edb84c4b1529259"} Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.076508 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5rgv" event={"ID":"29279517-9ad6-4afc-9cfb-a895652124ed","Type":"ContainerStarted","Data":"9eca177ad42739cbd8449bacb3edd7b5f4705eab36cc98b6a4464551dcd1c943"} Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.077267 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.077406 4763 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.077581 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.079932 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6sjn" 
event={"ID":"7dac1917-a2ac-4485-93ff-011dd58fcab7","Type":"ContainerStarted","Data":"e26d3d2d6426a0c18fc3b81d458af08e2d322d8994646214a94add11c1a64c99"} Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.080636 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.080800 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.081035 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.081340 4763 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.082064 4763 generic.go:334] "Generic (PLEG): container finished" podID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" containerID="57170eaa08209b9ed88fdf61ea729946d3c83860c93e0b28dc641f6f9e9f0b1f" exitCode=0 Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.082128 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"27ded1af-d1b2-44f7-899e-12946c3e3a8e","Type":"ContainerDied","Data":"57170eaa08209b9ed88fdf61ea729946d3c83860c93e0b28dc641f6f9e9f0b1f"} Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.082469 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.082690 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.082937 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.083271 4763 
status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.083468 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.084150 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.085166 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.085663 4763 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e" exitCode=0 Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.085682 4763 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8" exitCode=0 Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.085690 4763 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241" exitCode=0 Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.085699 4763 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838" exitCode=2 Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.086329 4763 scope.go:117] "RemoveContainer" containerID="27e5fc04ffa8dcc66d782f78ca48a33fec621058663fecf3bc284330c5db1690" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.282479 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.282581 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.335074 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.335950 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.336383 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.336867 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.337271 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.337547 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:35 crc kubenswrapper[4763]: I1206 08:14:35.337820 4763 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.096107 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kvt7x" event={"ID":"31406d5a-2fb1-4c58-a333-8decda95ca2a","Type":"ContainerStarted","Data":"5d398e17f974444813fe329e5044b7a27c8f82988ac009724efa082a41acc467"} Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.097864 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.098078 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.098338 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.098712 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.098940 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.100853 4763 generic.go:334] "Generic (PLEG): container finished" podID="7dac1917-a2ac-4485-93ff-011dd58fcab7" containerID="e26d3d2d6426a0c18fc3b81d458af08e2d322d8994646214a94add11c1a64c99" exitCode=0 Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.100920 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6sjn" event={"ID":"7dac1917-a2ac-4485-93ff-011dd58fcab7","Type":"ContainerDied","Data":"e26d3d2d6426a0c18fc3b81d458af08e2d322d8994646214a94add11c1a64c99"} Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.101525 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.101671 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.101813 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.101996 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.102188 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:36 crc kubenswrapper[4763]: I1206 08:14:36.106036 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.005057 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.006111 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.006414 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.006634 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.006852 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.007124 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.013514 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.014297 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.014795 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.015070 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.015349 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.015541 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.015866 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.016269 4763 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.113259 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"27ded1af-d1b2-44f7-899e-12946c3e3a8e","Type":"ContainerDied","Data":"e07f3466609b6f640a0c571052c227bd8f5dddb509e9091ab3e6d8af4cc9d215"} Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.113618 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e07f3466609b6f640a0c571052c227bd8f5dddb509e9091ab3e6d8af4cc9d215" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.113301 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.121483 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.122087 4763 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493" exitCode=0 Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.122145 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.122180 4763 scope.go:117] "RemoveContainer" containerID="1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.123965 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5rgv" event={"ID":"29279517-9ad6-4afc-9cfb-a895652124ed","Type":"ContainerStarted","Data":"33d046beb7c9bb2ea2a79943375e53654b95b79370e7bc3fb1111441b46900f8"} Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.124473 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.124681 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.124932 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.125140 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.125359 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.125561 4763 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": 
dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.126137 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6sjn" event={"ID":"7dac1917-a2ac-4485-93ff-011dd58fcab7","Type":"ContainerStarted","Data":"13722c8cf50386bd8f26aaf21da379c73659d76755720dcc9fa01e5a001f0af5"} Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.126542 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.127044 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.127290 4763 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.127542 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.127755 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.127996 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.134653 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-var-lock\") pod \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\" (UID: \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.134763 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.134787 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.134820 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.134832 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kube-api-access\") pod \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\" (UID: \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.134879 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.134818 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-var-lock" (OuterVolumeSpecName: "var-lock") pod "27ded1af-d1b2-44f7-899e-12946c3e3a8e" (UID: "27ded1af-d1b2-44f7-899e-12946c3e3a8e"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.134989 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kubelet-dir\") pod \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\" (UID: \"27ded1af-d1b2-44f7-899e-12946c3e3a8e\") " Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.135027 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.135359 4763 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-var-lock\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.135375 4763 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.135384 4763 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.135419 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: 
"f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.135449 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "27ded1af-d1b2-44f7-899e-12946c3e3a8e" (UID: "27ded1af-d1b2-44f7-899e-12946c3e3a8e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.141047 4763 scope.go:117] "RemoveContainer" containerID="40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.153494 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "27ded1af-d1b2-44f7-899e-12946c3e3a8e" (UID: "27ded1af-d1b2-44f7-899e-12946c3e3a8e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.162057 4763 scope.go:117] "RemoveContainer" containerID="98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.175177 4763 scope.go:117] "RemoveContainer" containerID="69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.191183 4763 scope.go:117] "RemoveContainer" containerID="63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.215952 4763 scope.go:117] "RemoveContainer" containerID="2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.237009 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.237033 4763 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/27ded1af-d1b2-44f7-899e-12946c3e3a8e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.237043 4763 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.243269 4763 scope.go:117] "RemoveContainer" containerID="1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e" Dec 06 08:14:37 crc kubenswrapper[4763]: E1206 08:14:37.244312 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e\": container with ID starting with 1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e not found: ID does not exist" containerID="1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.244353 4763 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e"} err="failed to get container status \"1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e\": rpc error: code = NotFound desc = could not find container \"1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e\": container with ID starting with 1f37a590bbc882ebaafae125e321a3597f39d6ac078c21500be3e48a4a78b15e not found: ID does not exist" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.244381 4763 scope.go:117] "RemoveContainer" containerID="40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8" Dec 06 08:14:37 crc kubenswrapper[4763]: E1206 08:14:37.244979 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8\": container with ID starting with 40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8 not found: ID does not exist" containerID="40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.245003 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8"} err="failed to get container status \"40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8\": rpc error: code = NotFound desc = could not find container \"40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8\": container with ID starting with 40565231a6641b4042c1a3510b74a3362a759301d153fd9eb0ec91bbbcbcd4b8 not found: ID does not exist" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.245018 4763 scope.go:117] "RemoveContainer" containerID="98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241" Dec 06 08:14:37 crc kubenswrapper[4763]: E1206 08:14:37.245301 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241\": container with ID starting with 98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241 not found: ID does not exist" containerID="98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.245341 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241"} err="failed to get container status \"98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241\": rpc error: code = NotFound desc = could not find container \"98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241\": container with ID starting with 98e38fbc1a74b03fd63d36ef3e07c60e707f4c88c66471b270e5fd020488c241 not found: ID does not exist" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.245366 4763 scope.go:117] "RemoveContainer" containerID="69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838" Dec 06 08:14:37 crc kubenswrapper[4763]: E1206 08:14:37.245588 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838\": container with ID starting with 69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838 not found: ID does not exist" 
containerID="69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.245613 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838"} err="failed to get container status \"69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838\": rpc error: code = NotFound desc = could not find container \"69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838\": container with ID starting with 69e5691cc542ecd5f00f5f2a298e9f412cf68f8e765f18e3991e344bb7213838 not found: ID does not exist" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.245627 4763 scope.go:117] "RemoveContainer" containerID="63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493" Dec 06 08:14:37 crc kubenswrapper[4763]: E1206 08:14:37.245815 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493\": container with ID starting with 63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493 not found: ID does not exist" containerID="63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.245838 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493"} err="failed to get container status \"63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493\": rpc error: code = NotFound desc = could not find container \"63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493\": container with ID starting with 63c41b7df6965ba893999fb91300615881f4a3ce5b9f297ddccc29923ddb2493 not found: ID does not exist" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.245854 4763 scope.go:117] "RemoveContainer" containerID="2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6" Dec 06 08:14:37 crc kubenswrapper[4763]: E1206 08:14:37.246011 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\": container with ID starting with 2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6 not found: ID does not exist" containerID="2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.246032 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6"} err="failed to get container status \"2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\": rpc error: code = NotFound desc = could not find container \"2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6\": container with ID starting with 2ca0fde061693320ce094608dec58024b3904569b285590785f3048dd19ff1e6 not found: ID does not exist" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.426909 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 
crc kubenswrapper[4763]: I1206 08:14:37.428597 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.428866 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.429184 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.429522 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.429728 4763 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.437018 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.437589 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.438147 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.438369 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: 
I1206 08:14:37.438624 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.438829 4763 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: E1206 08:14:37.596587 4763 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.18:6443: connect: connection refused" event="&Event{ObjectMeta:{community-operators-r5rgv.187e9238487aa0e1 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:community-operators-r5rgv,UID:29279517-9ad6-4afc-9cfb-a895652124ed,APIVersion:v1,ResourceVersion:29396,FieldPath:spec.initContainers{extract-utilities},},Reason:Created,Message:Created container extract-utilities,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-06 08:14:34.407633121 +0000 UTC m=+156.983338159,LastTimestamp:2025-12-06 08:14:34.407633121 +0000 UTC m=+156.983338159,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.722099 4763 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.722473 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.722844 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.723153 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.723470 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" 
pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.723773 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:37 crc kubenswrapper[4763]: I1206 08:14:37.726217 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 06 08:14:38 crc kubenswrapper[4763]: I1206 08:14:38.134331 4763 generic.go:334] "Generic (PLEG): container finished" podID="29279517-9ad6-4afc-9cfb-a895652124ed" containerID="33d046beb7c9bb2ea2a79943375e53654b95b79370e7bc3fb1111441b46900f8" exitCode=0 Dec 06 08:14:38 crc kubenswrapper[4763]: I1206 08:14:38.134388 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5rgv" event={"ID":"29279517-9ad6-4afc-9cfb-a895652124ed","Type":"ContainerDied","Data":"33d046beb7c9bb2ea2a79943375e53654b95b79370e7bc3fb1111441b46900f8"} Dec 06 08:14:38 crc kubenswrapper[4763]: I1206 08:14:38.135666 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:38 crc kubenswrapper[4763]: I1206 08:14:38.135913 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:38 crc kubenswrapper[4763]: I1206 08:14:38.136070 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:38 crc kubenswrapper[4763]: I1206 08:14:38.136205 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:38 crc kubenswrapper[4763]: I1206 08:14:38.136391 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:39 crc kubenswrapper[4763]: E1206 08:14:39.105300 4763 controller.go:195] "Failed to update lease" err="Put 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:39 crc kubenswrapper[4763]: E1206 08:14:39.106150 4763 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:39 crc kubenswrapper[4763]: E1206 08:14:39.106386 4763 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:39 crc kubenswrapper[4763]: E1206 08:14:39.106629 4763 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:39 crc kubenswrapper[4763]: E1206 08:14:39.106923 4763 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:39 crc kubenswrapper[4763]: I1206 08:14:39.106962 4763 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 06 08:14:39 crc kubenswrapper[4763]: E1206 08:14:39.107233 4763 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" interval="200ms" Dec 06 08:14:39 crc kubenswrapper[4763]: I1206 08:14:39.143741 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5rgv" event={"ID":"29279517-9ad6-4afc-9cfb-a895652124ed","Type":"ContainerStarted","Data":"f4827360665c4fdedb89caf907d611d3369e204861085f76593c0318bb9ccd37"} Dec 06 08:14:39 crc kubenswrapper[4763]: I1206 08:14:39.144551 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:39 crc kubenswrapper[4763]: I1206 08:14:39.144876 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:39 crc kubenswrapper[4763]: I1206 08:14:39.145210 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:39 crc kubenswrapper[4763]: I1206 08:14:39.145486 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" 
pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:39 crc kubenswrapper[4763]: I1206 08:14:39.145727 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:39 crc kubenswrapper[4763]: E1206 08:14:39.308164 4763 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" interval="400ms" Dec 06 08:14:39 crc kubenswrapper[4763]: E1206 08:14:39.326014 4763 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.18:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:39 crc kubenswrapper[4763]: I1206 08:14:39.326523 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:39 crc kubenswrapper[4763]: W1206 08:14:39.349514 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-7bf785584e39ea4e07b771df1d0cf0b480bc424fa1310d7031b53f18b0a935ee WatchSource:0}: Error finding container 7bf785584e39ea4e07b771df1d0cf0b480bc424fa1310d7031b53f18b0a935ee: Status 404 returned error can't find the container with id 7bf785584e39ea4e07b771df1d0cf0b480bc424fa1310d7031b53f18b0a935ee Dec 06 08:14:39 crc kubenswrapper[4763]: E1206 08:14:39.709269 4763 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" interval="800ms" Dec 06 08:14:40 crc kubenswrapper[4763]: I1206 08:14:40.149891 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f"} Dec 06 08:14:40 crc kubenswrapper[4763]: I1206 08:14:40.149969 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"7bf785584e39ea4e07b771df1d0cf0b480bc424fa1310d7031b53f18b0a935ee"} Dec 06 08:14:40 crc kubenswrapper[4763]: I1206 08:14:40.150652 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:40 crc kubenswrapper[4763]: E1206 08:14:40.150727 4763 kubelet.go:1929] "Failed creating a mirror pod for" err="Post 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.18:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:14:40 crc kubenswrapper[4763]: I1206 08:14:40.151126 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:40 crc kubenswrapper[4763]: I1206 08:14:40.151532 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:40 crc kubenswrapper[4763]: I1206 08:14:40.151827 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:40 crc kubenswrapper[4763]: I1206 08:14:40.152182 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:40 crc kubenswrapper[4763]: E1206 08:14:40.510251 4763 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" interval="1.6s" Dec 06 08:14:41 crc kubenswrapper[4763]: I1206 08:14:41.482205 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:41 crc kubenswrapper[4763]: I1206 08:14:41.482557 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:41 crc kubenswrapper[4763]: I1206 08:14:41.580834 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:41 crc kubenswrapper[4763]: I1206 08:14:41.581460 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:41 crc kubenswrapper[4763]: I1206 08:14:41.581676 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:41 crc kubenswrapper[4763]: I1206 08:14:41.582343 4763 status_manager.go:851] 
"Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:41 crc kubenswrapper[4763]: I1206 08:14:41.582750 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:41 crc kubenswrapper[4763]: I1206 08:14:41.582972 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: E1206 08:14:42.111795 4763 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" interval="3.2s" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.211575 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kvt7x" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.212149 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.212495 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.212654 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.212805 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.212973 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: 
connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.384532 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.385119 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.385505 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.385854 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.386151 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.386479 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.386737 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: E1206 08:14:42.481041 4763 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.18:6443: connect: connection refused" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" volumeName="registry-storage" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.508928 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.508974 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 
08:14:42.537146 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.537225 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.554323 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.554869 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.555110 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.555443 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.555665 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.555864 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:42 crc kubenswrapper[4763]: I1206 08:14:42.556107 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.202588 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-w6sjn" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.203085 4763 status_manager.go:851] "Failed 
to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.203609 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.204190 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.204629 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.204948 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.205243 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.878757 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.880026 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.924119 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.924534 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.924858 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.925281 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.925439 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.925598 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:43 crc kubenswrapper[4763]: I1206 08:14:43.925740 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: E1206 08:14:44.025037 4763 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T08:14:44Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T08:14:44Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T08:14:44Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-06T08:14:44Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: E1206 08:14:44.025495 4763 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: E1206 08:14:44.025892 4763 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 
38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: E1206 08:14:44.026199 4763 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: E1206 08:14:44.026430 4763 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: E1206 08:14:44.026456 4763 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 06 08:14:44 crc kubenswrapper[4763]: I1206 08:14:44.220649 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-r5rgv" Dec 06 08:14:44 crc kubenswrapper[4763]: I1206 08:14:44.222556 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: I1206 08:14:44.222918 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: I1206 08:14:44.223174 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: I1206 08:14:44.223538 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: I1206 08:14:44.223876 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: I1206 08:14:44.224154 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:44 crc kubenswrapper[4763]: I1206 08:14:44.943501 4763 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" containerName="oauth-openshift" containerID="cri-o://a92efb1a1057138e4569df3e062be09f070a912ac3c306b48fde3bd5a60ec382" gracePeriod=15 Dec 06 08:14:45 crc kubenswrapper[4763]: E1206 08:14:45.314309 4763 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.18:6443: connect: connection refused" interval="6.4s" Dec 06 08:14:45 crc kubenswrapper[4763]: I1206 08:14:45.325037 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kg8kh" Dec 06 08:14:45 crc kubenswrapper[4763]: I1206 08:14:45.325463 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:45 crc kubenswrapper[4763]: I1206 08:14:45.325778 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:45 crc kubenswrapper[4763]: I1206 08:14:45.326251 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:45 crc kubenswrapper[4763]: I1206 08:14:45.326652 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:45 crc kubenswrapper[4763]: I1206 08:14:45.327016 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:45 crc kubenswrapper[4763]: I1206 08:14:45.327296 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.109014 4763 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-lrzmx container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" start-of-body= Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.109886 
4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.719264 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.720129 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.720429 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.720775 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.721107 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.721326 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.721601 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.731258 4763 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.731305 4763 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:14:46 crc kubenswrapper[4763]: E1206 08:14:46.731666 4763 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 
38.102.83.18:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:46 crc kubenswrapper[4763]: I1206 08:14:46.732211 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:46 crc kubenswrapper[4763]: W1206 08:14:46.749266 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-b545a262e25eccd35b9bb0058ae562e8cb87c561f506d04d521c99f7938084b6 WatchSource:0}: Error finding container b545a262e25eccd35b9bb0058ae562e8cb87c561f506d04d521c99f7938084b6: Status 404 returned error can't find the container with id b545a262e25eccd35b9bb0058ae562e8cb87c561f506d04d521c99f7938084b6 Dec 06 08:14:47 crc kubenswrapper[4763]: I1206 08:14:47.190385 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b545a262e25eccd35b9bb0058ae562e8cb87c561f506d04d521c99f7938084b6"} Dec 06 08:14:47 crc kubenswrapper[4763]: I1206 08:14:47.192394 4763 generic.go:334] "Generic (PLEG): container finished" podID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" containerID="a92efb1a1057138e4569df3e062be09f070a912ac3c306b48fde3bd5a60ec382" exitCode=0 Dec 06 08:14:47 crc kubenswrapper[4763]: I1206 08:14:47.192480 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" event={"ID":"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce","Type":"ContainerDied","Data":"a92efb1a1057138e4569df3e062be09f070a912ac3c306b48fde3bd5a60ec382"} Dec 06 08:14:47 crc kubenswrapper[4763]: E1206 08:14:47.597421 4763 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.18:6443: connect: connection refused" event="&Event{ObjectMeta:{community-operators-r5rgv.187e9238487aa0e1 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:community-operators-r5rgv,UID:29279517-9ad6-4afc-9cfb-a895652124ed,APIVersion:v1,ResourceVersion:29396,FieldPath:spec.initContainers{extract-utilities},},Reason:Created,Message:Created container extract-utilities,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-06 08:14:34.407633121 +0000 UTC m=+156.983338159,LastTimestamp:2025-12-06 08:14:34.407633121 +0000 UTC m=+156.983338159,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 06 08:14:47 crc kubenswrapper[4763]: I1206 08:14:47.723694 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:47 crc kubenswrapper[4763]: I1206 08:14:47.725136 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 
38.102.83.18:6443: connect: connection refused" Dec 06 08:14:47 crc kubenswrapper[4763]: I1206 08:14:47.725576 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:47 crc kubenswrapper[4763]: I1206 08:14:47.725918 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:47 crc kubenswrapper[4763]: I1206 08:14:47.726191 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:47 crc kubenswrapper[4763]: I1206 08:14:47.726541 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:47 crc kubenswrapper[4763]: I1206 08:14:47.726870 4763 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.411476 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.412179 4763 status_manager.go:851] "Failed to get status for pod" podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-lrzmx\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.412600 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.413013 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.413503 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.413778 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.414145 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.414600 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.415091 4763 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478259 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-serving-cert\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478364 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-policies\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478417 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-dir\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478465 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-router-certs\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478536 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-session\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478565 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-login\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478585 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-error\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478607 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-idp-0-file-data\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478626 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-ocp-branding-template\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478649 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8g4dd\" (UniqueName: \"kubernetes.io/projected/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-kube-api-access-8g4dd\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: 
\"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478685 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-trusted-ca-bundle\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478719 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-cliconfig\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478798 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-provider-selection\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.478825 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-service-ca\") pod \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\" (UID: \"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce\") " Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.479985 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.480316 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.480311 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.480587 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.481082 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.485351 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.485681 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-kube-api-access-8g4dd" (OuterVolumeSpecName: "kube-api-access-8g4dd") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "kube-api-access-8g4dd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.485986 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.486220 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.488627 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.489401 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.489491 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.489848 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.494442 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" (UID: "a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579782 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579814 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579825 4763 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579837 4763 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579847 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579856 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579865 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc 
kubenswrapper[4763]: I1206 08:14:48.579875 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579883 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579893 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579917 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8g4dd\" (UniqueName: \"kubernetes.io/projected/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-kube-api-access-8g4dd\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579926 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579933 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:48 crc kubenswrapper[4763]: I1206 08:14:48.579942 4763 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.203269 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c3295040c2b3971037d050d7261514a1eb3d6965276d8b53064ca675cb5e7bbd"} Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.204493 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" event={"ID":"a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce","Type":"ContainerDied","Data":"1700f66293ec65fda97f2fef752796f4b4caf923d3a06a431ce0dfc5f1ae82c0"} Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.204550 4763 scope.go:117] "RemoveContainer" containerID="a92efb1a1057138e4569df3e062be09f070a912ac3c306b48fde3bd5a60ec382" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.204548 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.205338 4763 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.205613 4763 status_manager.go:851] "Failed to get status for pod" podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-lrzmx\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.206386 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.206729 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.207434 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.207685 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.207935 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.208342 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.219049 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.219552 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.219836 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.220134 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.220500 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.220710 4763 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.220916 4763 status_manager.go:851] "Failed to get status for pod" podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-lrzmx\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:49 crc kubenswrapper[4763]: I1206 08:14:49.221241 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.218181 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.218695 4763 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="d0529710afe7b975ccf39d5adda2da2e67315e1cfd5fa4e22464655bcb955093" exitCode=1 Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.218796 4763 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"d0529710afe7b975ccf39d5adda2da2e67315e1cfd5fa4e22464655bcb955093"} Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.219300 4763 scope.go:117] "RemoveContainer" containerID="d0529710afe7b975ccf39d5adda2da2e67315e1cfd5fa4e22464655bcb955093" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.219679 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.220033 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.220251 4763 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.220433 4763 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="c3295040c2b3971037d050d7261514a1eb3d6965276d8b53064ca675cb5e7bbd" exitCode=0 Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.220435 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.220458 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"c3295040c2b3971037d050d7261514a1eb3d6965276d8b53064ca675cb5e7bbd"} Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.220645 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.220673 4763 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.220688 4763 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:14:51 crc kubenswrapper[4763]: E1206 08:14:51.220926 4763 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.220945 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.221285 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.221627 4763 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.221877 4763 status_manager.go:851] "Failed to get status for pod" podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-lrzmx\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.223071 4763 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.224245 4763 status_manager.go:851] "Failed to get status for pod" podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" pod="openshift-authentication/oauth-openshift-558db77b4-lrzmx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-lrzmx\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.224517 4763 status_manager.go:851] "Failed to get status for pod" podUID="fdc178a0-0a78-4b8b-85ba-32167fdbcf89" pod="openshift-image-registry/image-registry-66df7c8f76-hsbkg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-hsbkg\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.224724 4763 status_manager.go:851] "Failed to get status for pod" podUID="7dac1917-a2ac-4485-93ff-011dd58fcab7" pod="openshift-marketplace/redhat-operators-w6sjn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-w6sjn\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.225014 4763 status_manager.go:851] "Failed to get status for pod" 
podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.225273 4763 status_manager.go:851] "Failed to get status for pod" podUID="31406d5a-2fb1-4c58-a333-8decda95ca2a" pod="openshift-marketplace/certified-operators-kvt7x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kvt7x\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.225532 4763 status_manager.go:851] "Failed to get status for pod" podUID="29279517-9ad6-4afc-9cfb-a895652124ed" pod="openshift-marketplace/community-operators-r5rgv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-r5rgv\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.226342 4763 status_manager.go:851] "Failed to get status for pod" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:51 crc kubenswrapper[4763]: I1206 08:14:51.226523 4763 status_manager.go:851] "Failed to get status for pod" podUID="a6f5ed36-5b72-45e8-8aea-5715275f5f41" pod="openshift-marketplace/redhat-marketplace-kg8kh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kg8kh\": dial tcp 38.102.83.18:6443: connect: connection refused" Dec 06 08:14:52 crc kubenswrapper[4763]: I1206 08:14:52.231107 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3b44810b54f35702947c285a33cac331bf9b8fb165b813b616f68280f21aee68"} Dec 06 08:14:52 crc kubenswrapper[4763]: I1206 08:14:52.231415 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9088e8af192f9fb1d9ab9c1e1c355829bccd2b10876931d93ad376f342b39a5c"} Dec 06 08:14:52 crc kubenswrapper[4763]: I1206 08:14:52.231426 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6783cb11ed49a60c399300c0c1742ce02f08d98a68cadd690cc1a58d2840fa86"} Dec 06 08:14:52 crc kubenswrapper[4763]: I1206 08:14:52.231435 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8ea480cce0af5554f09c73902401ef09d1101e4f69a44467fa5da6c8de992955"} Dec 06 08:14:52 crc kubenswrapper[4763]: I1206 08:14:52.239258 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 06 08:14:52 crc kubenswrapper[4763]: I1206 08:14:52.239312 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"eb36012eac90acaa4b3c174d2fd32c42d831c61f46102b1057343927f09d288d"} Dec 06 08:14:53 crc kubenswrapper[4763]: I1206 08:14:53.246618 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0d348f5ecb4bfe55ce9cc9dedd5b3ec708be79892620ff80ccfe12179bf60260"} Dec 06 08:14:53 crc kubenswrapper[4763]: I1206 08:14:53.246975 4763 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:14:53 crc kubenswrapper[4763]: I1206 08:14:53.247010 4763 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:14:53 crc kubenswrapper[4763]: I1206 08:14:53.246988 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:56 crc kubenswrapper[4763]: I1206 08:14:56.733129 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:56 crc kubenswrapper[4763]: I1206 08:14:56.733409 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:56 crc kubenswrapper[4763]: I1206 08:14:56.743247 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:57 crc kubenswrapper[4763]: I1206 08:14:57.229974 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:14:58 crc kubenswrapper[4763]: I1206 08:14:58.008560 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:14:58 crc kubenswrapper[4763]: I1206 08:14:58.014227 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:14:58 crc kubenswrapper[4763]: I1206 08:14:58.253171 4763 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:58 crc kubenswrapper[4763]: I1206 08:14:58.274704 4763 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:14:58 crc kubenswrapper[4763]: I1206 08:14:58.274733 4763 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:14:58 crc kubenswrapper[4763]: I1206 08:14:58.278431 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:14:58 crc kubenswrapper[4763]: I1206 08:14:58.280427 4763 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="76245bbe-c8fe-4e6e-ae05-73468a618f38" Dec 06 08:14:59 crc kubenswrapper[4763]: I1206 08:14:59.278599 4763 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:14:59 crc kubenswrapper[4763]: I1206 08:14:59.278631 4763 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:15:06 crc kubenswrapper[4763]: I1206 08:15:06.741338 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 06 08:15:06 crc kubenswrapper[4763]: I1206 08:15:06.741992 4763 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:15:06 crc kubenswrapper[4763]: I1206 08:15:06.742011 4763 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4413feb6-e0d5-46c0-9f03-8b07886f1cc8" Dec 06 08:15:07 crc kubenswrapper[4763]: I1206 08:15:07.237628 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 06 08:15:07 crc kubenswrapper[4763]: I1206 08:15:07.627707 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 06 08:15:07 crc kubenswrapper[4763]: I1206 08:15:07.707085 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 06 08:15:07 crc kubenswrapper[4763]: I1206 08:15:07.749240 4763 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="76245bbe-c8fe-4e6e-ae05-73468a618f38" Dec 06 08:15:07 crc kubenswrapper[4763]: I1206 08:15:07.911977 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 06 08:15:08 crc kubenswrapper[4763]: I1206 08:15:08.471834 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 06 08:15:08 crc kubenswrapper[4763]: I1206 08:15:08.525885 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 06 08:15:08 crc kubenswrapper[4763]: I1206 08:15:08.535678 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 06 08:15:08 crc kubenswrapper[4763]: I1206 08:15:08.611386 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 06 08:15:08 crc kubenswrapper[4763]: I1206 08:15:08.947127 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 06 08:15:09 crc kubenswrapper[4763]: I1206 08:15:09.357764 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 06 08:15:09 crc kubenswrapper[4763]: I1206 08:15:09.394853 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 06 08:15:09 crc kubenswrapper[4763]: I1206 08:15:09.530986 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 06 08:15:09 crc kubenswrapper[4763]: I1206 08:15:09.545940 4763 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 06 08:15:10 crc kubenswrapper[4763]: I1206 08:15:10.200117 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 06 08:15:10 crc kubenswrapper[4763]: I1206 08:15:10.399021 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 06 08:15:10 crc kubenswrapper[4763]: I1206 08:15:10.628392 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 06 08:15:10 crc kubenswrapper[4763]: I1206 08:15:10.705012 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 06 08:15:10 crc kubenswrapper[4763]: I1206 08:15:10.754716 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 06 08:15:10 crc kubenswrapper[4763]: I1206 08:15:10.757218 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 06 08:15:10 crc kubenswrapper[4763]: I1206 08:15:10.758970 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 06 08:15:10 crc kubenswrapper[4763]: I1206 08:15:10.842361 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 06 08:15:10 crc kubenswrapper[4763]: I1206 08:15:10.905020 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.299108 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.582918 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.600738 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.613766 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.684401 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.685785 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.705310 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.733610 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.768336 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 06 08:15:11 crc 
kubenswrapper[4763]: I1206 08:15:11.779129 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.787229 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.823604 4763 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.823804 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-r5rgv" podStartSLOduration=35.368306298 podStartE2EDuration="38.823762755s" podCreationTimestamp="2025-12-06 08:14:33 +0000 UTC" firstStartedPulling="2025-12-06 08:14:35.077880282 +0000 UTC m=+157.653585320" lastFinishedPulling="2025-12-06 08:14:38.533336739 +0000 UTC m=+161.109041777" observedRunningTime="2025-12-06 08:14:57.81800428 +0000 UTC m=+180.393709318" watchObservedRunningTime="2025-12-06 08:15:11.823762755 +0000 UTC m=+194.399467813" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.825130 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kvt7x" podStartSLOduration=39.446566923 podStartE2EDuration="40.825120095s" podCreationTimestamp="2025-12-06 08:14:31 +0000 UTC" firstStartedPulling="2025-12-06 08:14:34.06988876 +0000 UTC m=+156.645593798" lastFinishedPulling="2025-12-06 08:14:35.448441932 +0000 UTC m=+158.024146970" observedRunningTime="2025-12-06 08:14:58.023780503 +0000 UTC m=+180.599485551" watchObservedRunningTime="2025-12-06 08:15:11.825120095 +0000 UTC m=+194.400825153" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.826848 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-w6sjn" podStartSLOduration=37.413139497 podStartE2EDuration="39.826840075s" podCreationTimestamp="2025-12-06 08:14:32 +0000 UTC" firstStartedPulling="2025-12-06 08:14:34.06779097 +0000 UTC m=+156.643496008" lastFinishedPulling="2025-12-06 08:14:36.481491548 +0000 UTC m=+159.057196586" observedRunningTime="2025-12-06 08:14:58.007352827 +0000 UTC m=+180.583057875" watchObservedRunningTime="2025-12-06 08:15:11.826840075 +0000 UTC m=+194.402545133" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.828876 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-lrzmx"] Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.828960 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.853124 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=13.853108964 podStartE2EDuration="13.853108964s" podCreationTimestamp="2025-12-06 08:14:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:15:11.849598463 +0000 UTC m=+194.425303501" watchObservedRunningTime="2025-12-06 08:15:11.853108964 +0000 UTC m=+194.428814002" Dec 06 08:15:11 crc kubenswrapper[4763]: I1206 08:15:11.888323 4763 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.025414 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.038609 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.049208 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.200351 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.214750 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.253422 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.255492 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.314870 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.353177 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.382319 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.405029 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.418555 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.461403 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.463245 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.472031 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.472066 4763 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.536767 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.536864 4763 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.553168 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.600091 4763 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.609473 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.698484 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.716274 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 06 08:15:12 crc kubenswrapper[4763]: I1206 08:15:12.786565 4763 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.000185 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.110194 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.181466 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.244324 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.257010 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.259186 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.324842 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.367163 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.397831 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.458319 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.462303 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.508997 4763 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.727802 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" path="/var/lib/kubelet/pods/a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce/volumes" Dec 06 08:15:13 crc kubenswrapper[4763]: I1206 08:15:13.841527 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.008749 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.009779 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.081220 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.241955 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.286281 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.308000 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.311135 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.314550 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.340223 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.556046 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.643248 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.648803 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.652818 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.757406 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.781762 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.809384 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.866597 4763 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.879345 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.917796 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 06 08:15:14 crc kubenswrapper[4763]: I1206 08:15:14.950838 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.044046 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vlddl"] Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.045918 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.054596 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.059377 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.131967 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.154473 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.362862 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.372715 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.496727 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.501999 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.522865 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.728449 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.800887 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.847024 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.874923 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 06 08:15:15 crc kubenswrapper[4763]: I1206 08:15:15.884077 4763 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.159645 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.164928 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.209141 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.239593 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.275542 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.281128 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.302399 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.441697 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.510563 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.546936 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.643462 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.653084 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.675044 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.838278 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.910126 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.960395 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 06 08:15:16 crc kubenswrapper[4763]: I1206 08:15:16.982770 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.206089 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.432282 4763 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.486560 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.523669 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.539965 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.588626 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.679454 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.707169 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.709364 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.747922 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.786316 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.870375 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.890352 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 06 08:15:17 crc kubenswrapper[4763]: I1206 08:15:17.937619 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.021879 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.031826 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.055521 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.063811 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.160984 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.210945 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.225671 4763 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.230635 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.246549 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.247617 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.290129 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.297889 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.351454 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.421583 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.437709 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.461965 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.472079 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.491182 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.551164 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.560096 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.595716 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.656783 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.734460 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 06 08:15:18 crc kubenswrapper[4763]: I1206 08:15:18.794687 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.032156 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.040047 4763 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.083192 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.138269 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.211263 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.284036 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.354930 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.407457 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.554813 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.584752 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.611861 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.651724 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.704573 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 06 08:15:19 crc kubenswrapper[4763]: I1206 08:15:19.899890 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.030516 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.130066 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.210358 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.281789 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.327627 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.419307 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.428918 4763 reflector.go:368] Caches populated for 
*v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.508384 4763 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.508621 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f" gracePeriod=5 Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.590553 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.617221 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.617311 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.632862 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.644551 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.801445 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.804574 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 06 08:15:20 crc kubenswrapper[4763]: I1206 08:15:20.987597 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.010136 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.241996 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.255121 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.262093 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.465535 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.580744 4763 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.591574 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.676647 4763 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.701125 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.805970 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.824608 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 06 08:15:21 crc kubenswrapper[4763]: I1206 08:15:21.889286 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 06 08:15:22 crc kubenswrapper[4763]: I1206 08:15:22.018831 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 06 08:15:22 crc kubenswrapper[4763]: I1206 08:15:22.037492 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 06 08:15:22 crc kubenswrapper[4763]: I1206 08:15:22.133345 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 06 08:15:22 crc kubenswrapper[4763]: I1206 08:15:22.203243 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 06 08:15:22 crc kubenswrapper[4763]: I1206 08:15:22.218592 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 06 08:15:22 crc kubenswrapper[4763]: I1206 08:15:22.328598 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 06 08:15:22 crc kubenswrapper[4763]: I1206 08:15:22.345626 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 06 08:15:22 crc kubenswrapper[4763]: I1206 08:15:22.766328 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 06 08:15:22 crc kubenswrapper[4763]: I1206 08:15:22.909775 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 06 08:15:23 crc kubenswrapper[4763]: I1206 08:15:23.257515 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 06 08:15:23 crc kubenswrapper[4763]: I1206 08:15:23.494996 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 06 08:15:23 crc kubenswrapper[4763]: I1206 08:15:23.931305 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 06 08:15:24 crc kubenswrapper[4763]: I1206 08:15:24.225034 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.073689 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.074719 4763 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181062 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181128 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181189 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181201 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181267 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181316 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181334 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181388 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181523 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181782 4763 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181800 4763 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181811 4763 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.181823 4763 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.190560 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.282601 4763 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.424344 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.424417 4763 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f" exitCode=137 Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.424472 4763 scope.go:117] "RemoveContainer" containerID="c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.424511 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.439988 4763 scope.go:117] "RemoveContainer" containerID="c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f" Dec 06 08:15:26 crc kubenswrapper[4763]: E1206 08:15:26.440363 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f\": container with ID starting with c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f not found: ID does not exist" containerID="c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f" Dec 06 08:15:26 crc kubenswrapper[4763]: I1206 08:15:26.440419 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f"} err="failed to get container status \"c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f\": rpc error: code = NotFound desc = could not find container \"c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f\": container with ID starting with c889f51d0593c1ffd398b13ca755fa50512d20b5184e172496fa2c8a4f81d36f not found: ID does not exist" Dec 06 08:15:27 crc kubenswrapper[4763]: I1206 08:15:27.734733 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 06 08:15:35 crc kubenswrapper[4763]: I1206 08:15:35.521054 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 06 08:15:37 crc kubenswrapper[4763]: I1206 08:15:37.511769 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 06 08:15:38 crc kubenswrapper[4763]: I1206 08:15:38.314149 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.078303 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" podUID="7d3f1284-20c9-4aa5-9c45-3cc96943980c" containerName="registry" containerID="cri-o://a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25" gracePeriod=30 Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.411808 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.481077 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-certificates\") pod \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.481115 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-bound-sa-token\") pod \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.481137 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7d3f1284-20c9-4aa5-9c45-3cc96943980c-installation-pull-secrets\") pod \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.481160 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-tls\") pod \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.481187 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrld8\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-kube-api-access-hrld8\") pod \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.481340 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.481378 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7d3f1284-20c9-4aa5-9c45-3cc96943980c-ca-trust-extracted\") pod \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.481413 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-trusted-ca\") pod \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\" (UID: \"7d3f1284-20c9-4aa5-9c45-3cc96943980c\") " Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.482487 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "7d3f1284-20c9-4aa5-9c45-3cc96943980c" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.482503 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "7d3f1284-20c9-4aa5-9c45-3cc96943980c" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.491483 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d3f1284-20c9-4aa5-9c45-3cc96943980c-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "7d3f1284-20c9-4aa5-9c45-3cc96943980c" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.491764 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "7d3f1284-20c9-4aa5-9c45-3cc96943980c" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.491992 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "7d3f1284-20c9-4aa5-9c45-3cc96943980c" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.493037 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-kube-api-access-hrld8" (OuterVolumeSpecName: "kube-api-access-hrld8") pod "7d3f1284-20c9-4aa5-9c45-3cc96943980c" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c"). InnerVolumeSpecName "kube-api-access-hrld8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.495566 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "7d3f1284-20c9-4aa5-9c45-3cc96943980c" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.498093 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d3f1284-20c9-4aa5-9c45-3cc96943980c-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "7d3f1284-20c9-4aa5-9c45-3cc96943980c" (UID: "7d3f1284-20c9-4aa5-9c45-3cc96943980c"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.500412 4763 generic.go:334] "Generic (PLEG): container finished" podID="7d3f1284-20c9-4aa5-9c45-3cc96943980c" containerID="a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25" exitCode=0 Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.500485 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" event={"ID":"7d3f1284-20c9-4aa5-9c45-3cc96943980c","Type":"ContainerDied","Data":"a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25"} Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.500516 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" event={"ID":"7d3f1284-20c9-4aa5-9c45-3cc96943980c","Type":"ContainerDied","Data":"fa4f7518652394bad2320d85552c9bcb51d883dc7e42167dc3b291e197e64215"} Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.500563 4763 scope.go:117] "RemoveContainer" containerID="a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.500757 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vlddl" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.536688 4763 scope.go:117] "RemoveContainer" containerID="a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25" Dec 06 08:15:40 crc kubenswrapper[4763]: E1206 08:15:40.538018 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25\": container with ID starting with a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25 not found: ID does not exist" containerID="a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.538099 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25"} err="failed to get container status \"a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25\": rpc error: code = NotFound desc = could not find container \"a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25\": container with ID starting with a919fa29bf65bc21227e50aecf3fdc5aa97e117453aef6877644aa4d07657e25 not found: ID does not exist" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.549764 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vlddl"] Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.553221 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vlddl"] Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.582291 4763 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.582320 4763 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:40 crc kubenswrapper[4763]: 
I1206 08:15:40.582330 4763 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7d3f1284-20c9-4aa5-9c45-3cc96943980c-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.582339 4763 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.582347 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrld8\" (UniqueName: \"kubernetes.io/projected/7d3f1284-20c9-4aa5-9c45-3cc96943980c-kube-api-access-hrld8\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.582355 4763 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7d3f1284-20c9-4aa5-9c45-3cc96943980c-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:40 crc kubenswrapper[4763]: I1206 08:15:40.582362 4763 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d3f1284-20c9-4aa5-9c45-3cc96943980c-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:41 crc kubenswrapper[4763]: I1206 08:15:41.726542 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d3f1284-20c9-4aa5-9c45-3cc96943980c" path="/var/lib/kubelet/pods/7d3f1284-20c9-4aa5-9c45-3cc96943980c/volumes" Dec 06 08:15:42 crc kubenswrapper[4763]: I1206 08:15:42.266494 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 06 08:15:42 crc kubenswrapper[4763]: I1206 08:15:42.537116 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:15:42 crc kubenswrapper[4763]: I1206 08:15:42.537213 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:15:42 crc kubenswrapper[4763]: I1206 08:15:42.537285 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:15:42 crc kubenswrapper[4763]: I1206 08:15:42.538193 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a1256d4f047003ba6e45d47bdb93343aa51db47f3443f45b14e5254ef56ca361"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 08:15:42 crc kubenswrapper[4763]: I1206 08:15:42.538272 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://a1256d4f047003ba6e45d47bdb93343aa51db47f3443f45b14e5254ef56ca361" gracePeriod=600 Dec 06 08:15:43 
crc kubenswrapper[4763]: I1206 08:15:43.167548 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 06 08:15:43 crc kubenswrapper[4763]: I1206 08:15:43.522239 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="a1256d4f047003ba6e45d47bdb93343aa51db47f3443f45b14e5254ef56ca361" exitCode=0 Dec 06 08:15:43 crc kubenswrapper[4763]: I1206 08:15:43.522279 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"a1256d4f047003ba6e45d47bdb93343aa51db47f3443f45b14e5254ef56ca361"} Dec 06 08:15:43 crc kubenswrapper[4763]: I1206 08:15:43.522303 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"934262ae433b5543bf19f01f5875f6e74c9fb7bfaabca2d290eb9294c73853b5"} Dec 06 08:15:43 crc kubenswrapper[4763]: I1206 08:15:43.694005 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 06 08:15:44 crc kubenswrapper[4763]: I1206 08:15:44.360805 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 06 08:15:44 crc kubenswrapper[4763]: I1206 08:15:44.724490 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 06 08:15:45 crc kubenswrapper[4763]: I1206 08:15:45.536855 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 06 08:15:45 crc kubenswrapper[4763]: I1206 08:15:45.649934 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lfzzr"] Dec 06 08:15:45 crc kubenswrapper[4763]: I1206 08:15:45.650189 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" podUID="66731e3f-7796-4ca9-a290-0b1f8ce568c6" containerName="controller-manager" containerID="cri-o://e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932" gracePeriod=30 Dec 06 08:15:45 crc kubenswrapper[4763]: I1206 08:15:45.746097 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd"] Dec 06 08:15:45 crc kubenswrapper[4763]: I1206 08:15:45.746299 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" podUID="c6450aca-625d-4980-b576-8e24a98b87d8" containerName="route-controller-manager" containerID="cri-o://df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11" gracePeriod=30 Dec 06 08:15:45 crc kubenswrapper[4763]: I1206 08:15:45.956207 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.043277 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.049524 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-client-ca\") pod \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.049569 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8q2qn\" (UniqueName: \"kubernetes.io/projected/66731e3f-7796-4ca9-a290-0b1f8ce568c6-kube-api-access-8q2qn\") pod \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.049600 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66731e3f-7796-4ca9-a290-0b1f8ce568c6-serving-cert\") pod \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.049636 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-proxy-ca-bundles\") pod \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.049720 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-config\") pod \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\" (UID: \"66731e3f-7796-4ca9-a290-0b1f8ce568c6\") " Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.050651 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-client-ca" (OuterVolumeSpecName: "client-ca") pod "66731e3f-7796-4ca9-a290-0b1f8ce568c6" (UID: "66731e3f-7796-4ca9-a290-0b1f8ce568c6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.050711 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-config" (OuterVolumeSpecName: "config") pod "66731e3f-7796-4ca9-a290-0b1f8ce568c6" (UID: "66731e3f-7796-4ca9-a290-0b1f8ce568c6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.051489 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "66731e3f-7796-4ca9-a290-0b1f8ce568c6" (UID: "66731e3f-7796-4ca9-a290-0b1f8ce568c6"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.055296 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66731e3f-7796-4ca9-a290-0b1f8ce568c6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "66731e3f-7796-4ca9-a290-0b1f8ce568c6" (UID: "66731e3f-7796-4ca9-a290-0b1f8ce568c6"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.055354 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66731e3f-7796-4ca9-a290-0b1f8ce568c6-kube-api-access-8q2qn" (OuterVolumeSpecName: "kube-api-access-8q2qn") pod "66731e3f-7796-4ca9-a290-0b1f8ce568c6" (UID: "66731e3f-7796-4ca9-a290-0b1f8ce568c6"). InnerVolumeSpecName "kube-api-access-8q2qn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.150479 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2n2zr\" (UniqueName: \"kubernetes.io/projected/c6450aca-625d-4980-b576-8e24a98b87d8-kube-api-access-2n2zr\") pod \"c6450aca-625d-4980-b576-8e24a98b87d8\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.150533 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-config\") pod \"c6450aca-625d-4980-b576-8e24a98b87d8\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.150603 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-client-ca\") pod \"c6450aca-625d-4980-b576-8e24a98b87d8\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.150624 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6450aca-625d-4980-b576-8e24a98b87d8-serving-cert\") pod \"c6450aca-625d-4980-b576-8e24a98b87d8\" (UID: \"c6450aca-625d-4980-b576-8e24a98b87d8\") " Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.150804 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.150814 4763 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.150823 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8q2qn\" (UniqueName: \"kubernetes.io/projected/66731e3f-7796-4ca9-a290-0b1f8ce568c6-kube-api-access-8q2qn\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.150833 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66731e3f-7796-4ca9-a290-0b1f8ce568c6-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.150841 4763 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/66731e3f-7796-4ca9-a290-0b1f8ce568c6-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.151641 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-client-ca" (OuterVolumeSpecName: "client-ca") pod "c6450aca-625d-4980-b576-8e24a98b87d8" (UID: 
"c6450aca-625d-4980-b576-8e24a98b87d8"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.151663 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-config" (OuterVolumeSpecName: "config") pod "c6450aca-625d-4980-b576-8e24a98b87d8" (UID: "c6450aca-625d-4980-b576-8e24a98b87d8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.153599 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6450aca-625d-4980-b576-8e24a98b87d8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c6450aca-625d-4980-b576-8e24a98b87d8" (UID: "c6450aca-625d-4980-b576-8e24a98b87d8"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.153664 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6450aca-625d-4980-b576-8e24a98b87d8-kube-api-access-2n2zr" (OuterVolumeSpecName: "kube-api-access-2n2zr") pod "c6450aca-625d-4980-b576-8e24a98b87d8" (UID: "c6450aca-625d-4980-b576-8e24a98b87d8"). InnerVolumeSpecName "kube-api-access-2n2zr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.251950 4763 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.251977 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6450aca-625d-4980-b576-8e24a98b87d8-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.251986 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2n2zr\" (UniqueName: \"kubernetes.io/projected/c6450aca-625d-4980-b576-8e24a98b87d8-kube-api-access-2n2zr\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.251996 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6450aca-625d-4980-b576-8e24a98b87d8-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.543748 4763 generic.go:334] "Generic (PLEG): container finished" podID="66731e3f-7796-4ca9-a290-0b1f8ce568c6" containerID="e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932" exitCode=0 Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.543835 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" event={"ID":"66731e3f-7796-4ca9-a290-0b1f8ce568c6","Type":"ContainerDied","Data":"e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932"} Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.543868 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" event={"ID":"66731e3f-7796-4ca9-a290-0b1f8ce568c6","Type":"ContainerDied","Data":"12649fa95d3a5d8b9e74cbe1b265636633c99ac79eb57317ed31f7352e3f86fd"} Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.543912 4763 scope.go:117] "RemoveContainer" 
containerID="e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.543894 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lfzzr" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.545545 4763 generic.go:334] "Generic (PLEG): container finished" podID="c6450aca-625d-4980-b576-8e24a98b87d8" containerID="df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11" exitCode=0 Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.545581 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" event={"ID":"c6450aca-625d-4980-b576-8e24a98b87d8","Type":"ContainerDied","Data":"df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11"} Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.545605 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" event={"ID":"c6450aca-625d-4980-b576-8e24a98b87d8","Type":"ContainerDied","Data":"769df3d243aa3e0db12dd99267c522f00ecda8b5b7113b93c1d67d9686094d3a"} Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.545605 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.560079 4763 scope.go:117] "RemoveContainer" containerID="e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932" Dec 06 08:15:46 crc kubenswrapper[4763]: E1206 08:15:46.560780 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932\": container with ID starting with e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932 not found: ID does not exist" containerID="e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.560809 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932"} err="failed to get container status \"e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932\": rpc error: code = NotFound desc = could not find container \"e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932\": container with ID starting with e6ee1f5944f9ed6651de43c05a5ed413844ca259b28c94b4ed7f1d768e553932 not found: ID does not exist" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.560833 4763 scope.go:117] "RemoveContainer" containerID="df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.574958 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd"] Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.579029 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nvxmd"] Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.582003 4763 scope.go:117] "RemoveContainer" containerID="df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11" Dec 06 08:15:46 crc kubenswrapper[4763]: E1206 08:15:46.582413 4763 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11\": container with ID starting with df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11 not found: ID does not exist" containerID="df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.582450 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11"} err="failed to get container status \"df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11\": rpc error: code = NotFound desc = could not find container \"df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11\": container with ID starting with df60fd3fe9d7ad466e6e9f803ae9ccdbfa7cc605b8b51df6c76e859494973c11 not found: ID does not exist" Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.586077 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lfzzr"] Dec 06 08:15:46 crc kubenswrapper[4763]: I1206 08:15:46.588920 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lfzzr"] Dec 06 08:15:47 crc kubenswrapper[4763]: I1206 08:15:47.290742 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 06 08:15:47 crc kubenswrapper[4763]: I1206 08:15:47.726297 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66731e3f-7796-4ca9-a290-0b1f8ce568c6" path="/var/lib/kubelet/pods/66731e3f-7796-4ca9-a290-0b1f8ce568c6/volumes" Dec 06 08:15:47 crc kubenswrapper[4763]: I1206 08:15:47.726989 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6450aca-625d-4980-b576-8e24a98b87d8" path="/var/lib/kubelet/pods/c6450aca-625d-4980-b576-8e24a98b87d8/volumes" Dec 06 08:15:48 crc kubenswrapper[4763]: I1206 08:15:48.774302 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.056349 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.461611 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-d878cb77-xsgtj"] Dec 06 08:15:50 crc kubenswrapper[4763]: E1206 08:15:50.461850 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6450aca-625d-4980-b576-8e24a98b87d8" containerName="route-controller-manager" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.461867 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6450aca-625d-4980-b576-8e24a98b87d8" containerName="route-controller-manager" Dec 06 08:15:50 crc kubenswrapper[4763]: E1206 08:15:50.461879 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66731e3f-7796-4ca9-a290-0b1f8ce568c6" containerName="controller-manager" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.461888 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="66731e3f-7796-4ca9-a290-0b1f8ce568c6" containerName="controller-manager" Dec 06 08:15:50 crc kubenswrapper[4763]: E1206 08:15:50.461927 4763 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" containerName="oauth-openshift" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.461936 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" containerName="oauth-openshift" Dec 06 08:15:50 crc kubenswrapper[4763]: E1206 08:15:50.461946 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" containerName="installer" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.461956 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" containerName="installer" Dec 06 08:15:50 crc kubenswrapper[4763]: E1206 08:15:50.461983 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d3f1284-20c9-4aa5-9c45-3cc96943980c" containerName="registry" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.461994 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d3f1284-20c9-4aa5-9c45-3cc96943980c" containerName="registry" Dec 06 08:15:50 crc kubenswrapper[4763]: E1206 08:15:50.462006 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.462014 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.462126 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d3f1284-20c9-4aa5-9c45-3cc96943980c" containerName="registry" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.462136 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="66731e3f-7796-4ca9-a290-0b1f8ce568c6" containerName="controller-manager" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.462146 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6450aca-625d-4980-b576-8e24a98b87d8" containerName="route-controller-manager" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.462159 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.462175 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="a23e3b9b-25e2-44fa-a5c1-1ebff2d9ffce" containerName="oauth-openshift" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.462186 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="27ded1af-d1b2-44f7-899e-12946c3e3a8e" containerName="installer" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.462629 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.473213 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.473569 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.473874 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.474550 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.473970 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw"] Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.474641 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.474808 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.474172 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.474485 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.475446 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.475486 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.475546 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.475708 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.477172 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.479984 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2"] Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.481395 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.483706 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.486999 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.487822 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw"] Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.492809 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.493145 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.493370 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.493488 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.493596 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.493955 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.494352 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.494402 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.494560 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2"] Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.499926 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-d878cb77-xsgtj"] Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.502199 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607463 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-template-error\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607514 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6g5l\" (UniqueName: 
\"kubernetes.io/projected/a65d2c98-332d-4af4-a137-1f3c578dd868-kube-api-access-s6g5l\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607540 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-router-certs\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607565 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607583 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607614 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607704 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a65d2c98-332d-4af4-a137-1f3c578dd868-audit-dir\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607767 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-service-ca\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607806 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttqhj\" (UniqueName: \"kubernetes.io/projected/d06bf214-f96b-4332-8098-a2d6b8bb5039-kube-api-access-ttqhj\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607844 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607868 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldjzz\" (UniqueName: \"kubernetes.io/projected/459c6292-1876-41a5-9546-9a87fd104b1a-kube-api-access-ldjzz\") pod \"collect-profiles-29416815-d9cnw\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.607965 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-audit-policies\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.608009 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/459c6292-1876-41a5-9546-9a87fd104b1a-config-volume\") pod \"collect-profiles-29416815-d9cnw\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.608064 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/459c6292-1876-41a5-9546-9a87fd104b1a-secret-volume\") pod \"collect-profiles-29416815-d9cnw\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.608211 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d06bf214-f96b-4332-8098-a2d6b8bb5039-serving-cert\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.608349 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-config\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.608388 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 
08:15:50.608454 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-client-ca\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.608514 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-template-login\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.608544 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.608566 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-session\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709510 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709573 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a65d2c98-332d-4af4-a137-1f3c578dd868-audit-dir\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709598 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-service-ca\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709622 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttqhj\" (UniqueName: \"kubernetes.io/projected/d06bf214-f96b-4332-8098-a2d6b8bb5039-kube-api-access-ttqhj\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc 
kubenswrapper[4763]: I1206 08:15:50.709652 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709673 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldjzz\" (UniqueName: \"kubernetes.io/projected/459c6292-1876-41a5-9546-9a87fd104b1a-kube-api-access-ldjzz\") pod \"collect-profiles-29416815-d9cnw\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709693 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-audit-policies\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709714 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/459c6292-1876-41a5-9546-9a87fd104b1a-config-volume\") pod \"collect-profiles-29416815-d9cnw\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709742 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/459c6292-1876-41a5-9546-9a87fd104b1a-secret-volume\") pod \"collect-profiles-29416815-d9cnw\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709773 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d06bf214-f96b-4332-8098-a2d6b8bb5039-serving-cert\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709802 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-config\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709828 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709856 4763 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-client-ca\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709879 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709922 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-template-login\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709947 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-session\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709975 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6g5l\" (UniqueName: \"kubernetes.io/projected/a65d2c98-332d-4af4-a137-1f3c578dd868-kube-api-access-s6g5l\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.709995 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-template-error\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.710019 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-router-certs\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.710049 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.710074 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.711151 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.713355 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a65d2c98-332d-4af4-a137-1f3c578dd868-audit-dir\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.713715 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/459c6292-1876-41a5-9546-9a87fd104b1a-config-volume\") pod \"collect-profiles-29416815-d9cnw\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.714225 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.714547 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-config\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.714853 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-client-ca\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.716372 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-audit-policies\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.717613 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-service-ca\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: 
\"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.718153 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-session\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.718602 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/459c6292-1876-41a5-9546-9a87fd104b1a-secret-volume\") pod \"collect-profiles-29416815-d9cnw\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.719409 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-template-error\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.720582 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-template-login\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.722131 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.723862 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.724458 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d06bf214-f96b-4332-8098-a2d6b8bb5039-serving-cert\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.724878 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " 
pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.725527 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-system-router-certs\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.726385 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a65d2c98-332d-4af4-a137-1f3c578dd868-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.733591 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.736445 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldjzz\" (UniqueName: \"kubernetes.io/projected/459c6292-1876-41a5-9546-9a87fd104b1a-kube-api-access-ldjzz\") pod \"collect-profiles-29416815-d9cnw\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.737204 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6g5l\" (UniqueName: \"kubernetes.io/projected/a65d2c98-332d-4af4-a137-1f3c578dd868-kube-api-access-s6g5l\") pod \"oauth-openshift-d878cb77-xsgtj\" (UID: \"a65d2c98-332d-4af4-a137-1f3c578dd868\") " pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.745263 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttqhj\" (UniqueName: \"kubernetes.io/projected/d06bf214-f96b-4332-8098-a2d6b8bb5039-kube-api-access-ttqhj\") pod \"route-controller-manager-5596fc856c-cwwv2\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.783772 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.800358 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.811123 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.819958 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:50 crc kubenswrapper[4763]: I1206 08:15:50.983710 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw"] Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.230704 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2"] Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.234750 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-d878cb77-xsgtj"] Dec 06 08:15:51 crc kubenswrapper[4763]: W1206 08:15:51.235922 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd06bf214_f96b_4332_8098_a2d6b8bb5039.slice/crio-d18207973b35f8930c3884de636bdf96bdf70cc7b4f494acca56afd3d01190b2 WatchSource:0}: Error finding container d18207973b35f8930c3884de636bdf96bdf70cc7b4f494acca56afd3d01190b2: Status 404 returned error can't find the container with id d18207973b35f8930c3884de636bdf96bdf70cc7b4f494acca56afd3d01190b2 Dec 06 08:15:51 crc kubenswrapper[4763]: W1206 08:15:51.236542 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda65d2c98_332d_4af4_a137_1f3c578dd868.slice/crio-874560bac210cb50a8bd75ac02ba2f238a29fcabb689b42d767e1e6579c37165 WatchSource:0}: Error finding container 874560bac210cb50a8bd75ac02ba2f238a29fcabb689b42d767e1e6579c37165: Status 404 returned error can't find the container with id 874560bac210cb50a8bd75ac02ba2f238a29fcabb689b42d767e1e6579c37165 Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.246084 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.577718 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" event={"ID":"d06bf214-f96b-4332-8098-a2d6b8bb5039","Type":"ContainerStarted","Data":"074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e"} Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.578031 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.578042 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" event={"ID":"d06bf214-f96b-4332-8098-a2d6b8bb5039","Type":"ContainerStarted","Data":"d18207973b35f8930c3884de636bdf96bdf70cc7b4f494acca56afd3d01190b2"} Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.580487 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" event={"ID":"a65d2c98-332d-4af4-a137-1f3c578dd868","Type":"ContainerStarted","Data":"5d3ff06995d7245f17524a7a0040ffcbefbf6cdcf0ce07db52c1544b58513406"} Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.580571 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" event={"ID":"a65d2c98-332d-4af4-a137-1f3c578dd868","Type":"ContainerStarted","Data":"874560bac210cb50a8bd75ac02ba2f238a29fcabb689b42d767e1e6579c37165"} 
Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.580644 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.581747 4763 patch_prober.go:28] interesting pod/oauth-openshift-d878cb77-xsgtj container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.64:6443/healthz\": dial tcp 10.217.0.64:6443: connect: connection refused" start-of-body= Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.581859 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" podUID="a65d2c98-332d-4af4-a137-1f3c578dd868" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.64:6443/healthz\": dial tcp 10.217.0.64:6443: connect: connection refused" Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.582024 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" event={"ID":"459c6292-1876-41a5-9546-9a87fd104b1a","Type":"ContainerStarted","Data":"2abd3b0852dea0756d020e4b9edb87b25fcdf770433c0fb4e835e6fb65d8d0b4"} Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.582045 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" event={"ID":"459c6292-1876-41a5-9546-9a87fd104b1a","Type":"ContainerStarted","Data":"21409bee2721f9c95cd53db126e463caeb24cb5c31f43081209287147b4a9362"} Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.592787 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" podStartSLOduration=6.592753775 podStartE2EDuration="6.592753775s" podCreationTimestamp="2025-12-06 08:15:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:15:51.589219213 +0000 UTC m=+234.164924261" watchObservedRunningTime="2025-12-06 08:15:51.592753775 +0000 UTC m=+234.168458823" Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.607161 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" podStartSLOduration=37.607142206 podStartE2EDuration="37.607142206s" podCreationTimestamp="2025-12-06 08:15:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:15:51.603857932 +0000 UTC m=+234.179562970" watchObservedRunningTime="2025-12-06 08:15:51.607142206 +0000 UTC m=+234.182847244" Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.621500 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" podStartSLOduration=92.621481396 podStartE2EDuration="1m32.621481396s" podCreationTimestamp="2025-12-06 08:14:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:15:51.621394513 +0000 UTC m=+234.197099551" watchObservedRunningTime="2025-12-06 08:15:51.621481396 +0000 UTC m=+234.197186434" Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.770474 4763 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 06 08:15:51 crc kubenswrapper[4763]: I1206 08:15:51.802235 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.143991 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-74487f4477-pvxlb"] Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.144636 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.146484 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.146490 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.149837 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.150052 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.150182 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.152089 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.160981 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74487f4477-pvxlb"] Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.161758 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.227967 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-config\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.228053 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-proxy-ca-bundles\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.228093 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw8bc\" (UniqueName: \"kubernetes.io/projected/78583062-9f4b-45b0-8025-7421a4a1956d-kube-api-access-cw8bc\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.228112 4763 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-client-ca\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.228135 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78583062-9f4b-45b0-8025-7421a4a1956d-serving-cert\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.329141 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-proxy-ca-bundles\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.329191 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw8bc\" (UniqueName: \"kubernetes.io/projected/78583062-9f4b-45b0-8025-7421a4a1956d-kube-api-access-cw8bc\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.329220 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-client-ca\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.329256 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78583062-9f4b-45b0-8025-7421a4a1956d-serving-cert\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.329305 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-config\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.330304 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-client-ca\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.330771 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-config\") pod 
\"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.331325 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-proxy-ca-bundles\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.342216 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78583062-9f4b-45b0-8025-7421a4a1956d-serving-cert\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.344000 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw8bc\" (UniqueName: \"kubernetes.io/projected/78583062-9f4b-45b0-8025-7421a4a1956d-kube-api-access-cw8bc\") pod \"controller-manager-74487f4477-pvxlb\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.470002 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.589047 4763 generic.go:334] "Generic (PLEG): container finished" podID="459c6292-1876-41a5-9546-9a87fd104b1a" containerID="2abd3b0852dea0756d020e4b9edb87b25fcdf770433c0fb4e835e6fb65d8d0b4" exitCode=0 Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.589308 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" event={"ID":"459c6292-1876-41a5-9546-9a87fd104b1a","Type":"ContainerDied","Data":"2abd3b0852dea0756d020e4b9edb87b25fcdf770433c0fb4e835e6fb65d8d0b4"} Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.597502 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-d878cb77-xsgtj" Dec 06 08:15:52 crc kubenswrapper[4763]: I1206 08:15:52.708639 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74487f4477-pvxlb"] Dec 06 08:15:52 crc kubenswrapper[4763]: W1206 08:15:52.715000 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78583062_9f4b_45b0_8025_7421a4a1956d.slice/crio-1e2ba0f9d0ef18d582f9889dfa666211031e1040cd87d49b6a06bbb26bfe8288 WatchSource:0}: Error finding container 1e2ba0f9d0ef18d582f9889dfa666211031e1040cd87d49b6a06bbb26bfe8288: Status 404 returned error can't find the container with id 1e2ba0f9d0ef18d582f9889dfa666211031e1040cd87d49b6a06bbb26bfe8288 Dec 06 08:15:53 crc kubenswrapper[4763]: I1206 08:15:53.602538 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" event={"ID":"78583062-9f4b-45b0-8025-7421a4a1956d","Type":"ContainerStarted","Data":"2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56"} Dec 06 08:15:53 crc kubenswrapper[4763]: I1206 08:15:53.603173 
4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" event={"ID":"78583062-9f4b-45b0-8025-7421a4a1956d","Type":"ContainerStarted","Data":"1e2ba0f9d0ef18d582f9889dfa666211031e1040cd87d49b6a06bbb26bfe8288"} Dec 06 08:15:53 crc kubenswrapper[4763]: I1206 08:15:53.622102 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" podStartSLOduration=8.622081476 podStartE2EDuration="8.622081476s" podCreationTimestamp="2025-12-06 08:15:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:15:53.619330127 +0000 UTC m=+236.195035155" watchObservedRunningTime="2025-12-06 08:15:53.622081476 +0000 UTC m=+236.197786514" Dec 06 08:15:53 crc kubenswrapper[4763]: I1206 08:15:53.810375 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 06 08:15:53 crc kubenswrapper[4763]: I1206 08:15:53.929656 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.056473 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/459c6292-1876-41a5-9546-9a87fd104b1a-secret-volume\") pod \"459c6292-1876-41a5-9546-9a87fd104b1a\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.057833 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldjzz\" (UniqueName: \"kubernetes.io/projected/459c6292-1876-41a5-9546-9a87fd104b1a-kube-api-access-ldjzz\") pod \"459c6292-1876-41a5-9546-9a87fd104b1a\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.058261 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/459c6292-1876-41a5-9546-9a87fd104b1a-config-volume\") pod \"459c6292-1876-41a5-9546-9a87fd104b1a\" (UID: \"459c6292-1876-41a5-9546-9a87fd104b1a\") " Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.058970 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/459c6292-1876-41a5-9546-9a87fd104b1a-config-volume" (OuterVolumeSpecName: "config-volume") pod "459c6292-1876-41a5-9546-9a87fd104b1a" (UID: "459c6292-1876-41a5-9546-9a87fd104b1a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.059338 4763 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/459c6292-1876-41a5-9546-9a87fd104b1a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.062105 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/459c6292-1876-41a5-9546-9a87fd104b1a-kube-api-access-ldjzz" (OuterVolumeSpecName: "kube-api-access-ldjzz") pod "459c6292-1876-41a5-9546-9a87fd104b1a" (UID: "459c6292-1876-41a5-9546-9a87fd104b1a"). InnerVolumeSpecName "kube-api-access-ldjzz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.064455 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/459c6292-1876-41a5-9546-9a87fd104b1a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "459c6292-1876-41a5-9546-9a87fd104b1a" (UID: "459c6292-1876-41a5-9546-9a87fd104b1a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.160102 4763 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/459c6292-1876-41a5-9546-9a87fd104b1a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.160365 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldjzz\" (UniqueName: \"kubernetes.io/projected/459c6292-1876-41a5-9546-9a87fd104b1a-kube-api-access-ldjzz\") on node \"crc\" DevicePath \"\"" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.242116 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.611166 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.611168 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw" event={"ID":"459c6292-1876-41a5-9546-9a87fd104b1a","Type":"ContainerDied","Data":"21409bee2721f9c95cd53db126e463caeb24cb5c31f43081209287147b4a9362"} Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.612778 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21409bee2721f9c95cd53db126e463caeb24cb5c31f43081209287147b4a9362" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.613105 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.621698 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:15:54 crc kubenswrapper[4763]: I1206 08:15:54.814533 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 06 08:15:57 crc kubenswrapper[4763]: I1206 08:15:57.292753 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 06 08:15:58 crc kubenswrapper[4763]: I1206 08:15:58.006062 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 06 08:15:59 crc kubenswrapper[4763]: I1206 08:15:59.056255 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 06 08:15:59 crc kubenswrapper[4763]: I1206 08:15:59.088091 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.078131 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-74487f4477-pvxlb"] Dec 06 
08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.078932 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" podUID="78583062-9f4b-45b0-8025-7421a4a1956d" containerName="controller-manager" containerID="cri-o://2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56" gracePeriod=30 Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.173218 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2"] Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.173430 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" podUID="d06bf214-f96b-4332-8098-a2d6b8bb5039" containerName="route-controller-manager" containerID="cri-o://074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e" gracePeriod=30 Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.534423 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.538264 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.726769 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cw8bc\" (UniqueName: \"kubernetes.io/projected/78583062-9f4b-45b0-8025-7421a4a1956d-kube-api-access-cw8bc\") pod \"78583062-9f4b-45b0-8025-7421a4a1956d\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.727167 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-proxy-ca-bundles\") pod \"78583062-9f4b-45b0-8025-7421a4a1956d\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.727207 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-config\") pod \"d06bf214-f96b-4332-8098-a2d6b8bb5039\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.727300 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78583062-9f4b-45b0-8025-7421a4a1956d-serving-cert\") pod \"78583062-9f4b-45b0-8025-7421a4a1956d\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.727330 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d06bf214-f96b-4332-8098-a2d6b8bb5039-serving-cert\") pod \"d06bf214-f96b-4332-8098-a2d6b8bb5039\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.727364 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttqhj\" (UniqueName: \"kubernetes.io/projected/d06bf214-f96b-4332-8098-a2d6b8bb5039-kube-api-access-ttqhj\") pod \"d06bf214-f96b-4332-8098-a2d6b8bb5039\" (UID: 
\"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.727393 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-client-ca\") pod \"d06bf214-f96b-4332-8098-a2d6b8bb5039\" (UID: \"d06bf214-f96b-4332-8098-a2d6b8bb5039\") " Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.727472 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-config\") pod \"78583062-9f4b-45b0-8025-7421a4a1956d\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.727501 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-client-ca\") pod \"78583062-9f4b-45b0-8025-7421a4a1956d\" (UID: \"78583062-9f4b-45b0-8025-7421a4a1956d\") " Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.728564 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-client-ca" (OuterVolumeSpecName: "client-ca") pod "d06bf214-f96b-4332-8098-a2d6b8bb5039" (UID: "d06bf214-f96b-4332-8098-a2d6b8bb5039"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.728630 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "78583062-9f4b-45b0-8025-7421a4a1956d" (UID: "78583062-9f4b-45b0-8025-7421a4a1956d"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.728654 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-client-ca" (OuterVolumeSpecName: "client-ca") pod "78583062-9f4b-45b0-8025-7421a4a1956d" (UID: "78583062-9f4b-45b0-8025-7421a4a1956d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.728703 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-config" (OuterVolumeSpecName: "config") pod "d06bf214-f96b-4332-8098-a2d6b8bb5039" (UID: "d06bf214-f96b-4332-8098-a2d6b8bb5039"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.729393 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-config" (OuterVolumeSpecName: "config") pod "78583062-9f4b-45b0-8025-7421a4a1956d" (UID: "78583062-9f4b-45b0-8025-7421a4a1956d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.732595 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d06bf214-f96b-4332-8098-a2d6b8bb5039-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d06bf214-f96b-4332-8098-a2d6b8bb5039" (UID: "d06bf214-f96b-4332-8098-a2d6b8bb5039"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.732703 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78583062-9f4b-45b0-8025-7421a4a1956d-kube-api-access-cw8bc" (OuterVolumeSpecName: "kube-api-access-cw8bc") pod "78583062-9f4b-45b0-8025-7421a4a1956d" (UID: "78583062-9f4b-45b0-8025-7421a4a1956d"). InnerVolumeSpecName "kube-api-access-cw8bc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.732702 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78583062-9f4b-45b0-8025-7421a4a1956d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "78583062-9f4b-45b0-8025-7421a4a1956d" (UID: "78583062-9f4b-45b0-8025-7421a4a1956d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.732743 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d06bf214-f96b-4332-8098-a2d6b8bb5039-kube-api-access-ttqhj" (OuterVolumeSpecName: "kube-api-access-ttqhj") pod "d06bf214-f96b-4332-8098-a2d6b8bb5039" (UID: "d06bf214-f96b-4332-8098-a2d6b8bb5039"). InnerVolumeSpecName "kube-api-access-ttqhj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.787850 4763 generic.go:334] "Generic (PLEG): container finished" podID="d06bf214-f96b-4332-8098-a2d6b8bb5039" containerID="074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e" exitCode=0 Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.787893 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" event={"ID":"d06bf214-f96b-4332-8098-a2d6b8bb5039","Type":"ContainerDied","Data":"074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e"} Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.787955 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" event={"ID":"d06bf214-f96b-4332-8098-a2d6b8bb5039","Type":"ContainerDied","Data":"d18207973b35f8930c3884de636bdf96bdf70cc7b4f494acca56afd3d01190b2"} Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.787909 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.787973 4763 scope.go:117] "RemoveContainer" containerID="074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.789166 4763 generic.go:334] "Generic (PLEG): container finished" podID="78583062-9f4b-45b0-8025-7421a4a1956d" containerID="2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56" exitCode=0 Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.789195 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" event={"ID":"78583062-9f4b-45b0-8025-7421a4a1956d","Type":"ContainerDied","Data":"2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56"} Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.789207 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.789217 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74487f4477-pvxlb" event={"ID":"78583062-9f4b-45b0-8025-7421a4a1956d","Type":"ContainerDied","Data":"1e2ba0f9d0ef18d582f9889dfa666211031e1040cd87d49b6a06bbb26bfe8288"} Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.806196 4763 scope.go:117] "RemoveContainer" containerID="074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e" Dec 06 08:16:28 crc kubenswrapper[4763]: E1206 08:16:28.806679 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e\": container with ID starting with 074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e not found: ID does not exist" containerID="074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.806772 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e"} err="failed to get container status \"074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e\": rpc error: code = NotFound desc = could not find container \"074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e\": container with ID starting with 074ca26f2ed3c9b365ff0b4ca6f77c2a893c27999366cefb5b58080e0add260e not found: ID does not exist" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.806857 4763 scope.go:117] "RemoveContainer" containerID="2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.821749 4763 scope.go:117] "RemoveContainer" containerID="2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56" Dec 06 08:16:28 crc kubenswrapper[4763]: E1206 08:16:28.823273 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56\": container with ID starting with 2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56 not found: ID does not exist" containerID="2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.823325 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56"} err="failed to get container status \"2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56\": rpc error: code = NotFound desc = could not find container \"2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56\": container with ID starting with 2348447445f0fd70863a5a87f504a536dc211672d36a29d72d9e7899242dee56 not found: ID does not exist" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.823685 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-74487f4477-pvxlb"] Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.827577 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-74487f4477-pvxlb"] Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.828376 4763 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.828395 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78583062-9f4b-45b0-8025-7421a4a1956d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.828406 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d06bf214-f96b-4332-8098-a2d6b8bb5039-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.828415 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttqhj\" (UniqueName: \"kubernetes.io/projected/d06bf214-f96b-4332-8098-a2d6b8bb5039-kube-api-access-ttqhj\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.828424 4763 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d06bf214-f96b-4332-8098-a2d6b8bb5039-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.828431 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.828439 4763 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.828448 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cw8bc\" (UniqueName: \"kubernetes.io/projected/78583062-9f4b-45b0-8025-7421a4a1956d-kube-api-access-cw8bc\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.828455 4763 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78583062-9f4b-45b0-8025-7421a4a1956d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.830391 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2"] Dec 06 08:16:28 crc kubenswrapper[4763]: I1206 08:16:28.832969 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5596fc856c-cwwv2"] Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.169327 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8"] Dec 06 08:16:29 crc kubenswrapper[4763]: E1206 08:16:29.169575 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="459c6292-1876-41a5-9546-9a87fd104b1a" containerName="collect-profiles" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.169591 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="459c6292-1876-41a5-9546-9a87fd104b1a" containerName="collect-profiles" Dec 06 08:16:29 crc kubenswrapper[4763]: E1206 08:16:29.169613 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d06bf214-f96b-4332-8098-a2d6b8bb5039" containerName="route-controller-manager" Dec 06 08:16:29 crc kubenswrapper[4763]: 
I1206 08:16:29.169621 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d06bf214-f96b-4332-8098-a2d6b8bb5039" containerName="route-controller-manager" Dec 06 08:16:29 crc kubenswrapper[4763]: E1206 08:16:29.169633 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78583062-9f4b-45b0-8025-7421a4a1956d" containerName="controller-manager" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.169641 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="78583062-9f4b-45b0-8025-7421a4a1956d" containerName="controller-manager" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.169781 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="78583062-9f4b-45b0-8025-7421a4a1956d" containerName="controller-manager" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.169802 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="459c6292-1876-41a5-9546-9a87fd104b1a" containerName="collect-profiles" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.169819 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="d06bf214-f96b-4332-8098-a2d6b8bb5039" containerName="route-controller-manager" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.170303 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.173130 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.173584 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.173785 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.175373 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.175892 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.183928 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.185921 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.189598 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8"] Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.251271 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvlcj\" (UniqueName: \"kubernetes.io/projected/e60db585-d933-4264-bef1-2fb1ebf2ea90-kube-api-access-pvlcj\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.251342 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-config\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.251386 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e60db585-d933-4264-bef1-2fb1ebf2ea90-serving-cert\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.251408 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-client-ca\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.251434 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-proxy-ca-bundles\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.352449 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvlcj\" (UniqueName: \"kubernetes.io/projected/e60db585-d933-4264-bef1-2fb1ebf2ea90-kube-api-access-pvlcj\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.352515 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-config\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.352557 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e60db585-d933-4264-bef1-2fb1ebf2ea90-serving-cert\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.352582 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-client-ca\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.352600 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-proxy-ca-bundles\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: 
\"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.353884 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-proxy-ca-bundles\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.353981 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-client-ca\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.354121 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-config\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.356380 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e60db585-d933-4264-bef1-2fb1ebf2ea90-serving-cert\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.372378 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvlcj\" (UniqueName: \"kubernetes.io/projected/e60db585-d933-4264-bef1-2fb1ebf2ea90-kube-api-access-pvlcj\") pod \"controller-manager-7f7c59b9c6-mvbf8\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.493121 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.729252 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78583062-9f4b-45b0-8025-7421a4a1956d" path="/var/lib/kubelet/pods/78583062-9f4b-45b0-8025-7421a4a1956d/volumes" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.729997 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d06bf214-f96b-4332-8098-a2d6b8bb5039" path="/var/lib/kubelet/pods/d06bf214-f96b-4332-8098-a2d6b8bb5039/volumes" Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.742017 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8"] Dec 06 08:16:29 crc kubenswrapper[4763]: I1206 08:16:29.794151 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" event={"ID":"e60db585-d933-4264-bef1-2fb1ebf2ea90","Type":"ContainerStarted","Data":"9549c7886ce4a71b8383db18ac9a96fb8af5be8b5785c981e742b8920cd1ec0c"} Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.168876 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr"] Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.169794 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.171850 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.172002 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.172268 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.172331 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.172401 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.172910 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.180620 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr"] Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.265363 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc72d97f-b101-4003-a191-e4f95076f7ab-serving-cert\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.265438 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-config\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.265467 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-client-ca\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.265509 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhkww\" (UniqueName: \"kubernetes.io/projected/bc72d97f-b101-4003-a191-e4f95076f7ab-kube-api-access-fhkww\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.366545 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-client-ca\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.366626 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhkww\" (UniqueName: \"kubernetes.io/projected/bc72d97f-b101-4003-a191-e4f95076f7ab-kube-api-access-fhkww\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.366664 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc72d97f-b101-4003-a191-e4f95076f7ab-serving-cert\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.366713 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-config\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.367541 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-client-ca\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.367947 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-config\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.373519 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc72d97f-b101-4003-a191-e4f95076f7ab-serving-cert\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.384025 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhkww\" (UniqueName: \"kubernetes.io/projected/bc72d97f-b101-4003-a191-e4f95076f7ab-kube-api-access-fhkww\") pod \"route-controller-manager-5f4f4c8688-8vvjr\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.485132 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.813324 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" event={"ID":"e60db585-d933-4264-bef1-2fb1ebf2ea90","Type":"ContainerStarted","Data":"55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b"} Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.813715 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.819083 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.834449 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" podStartSLOduration=2.83442644 podStartE2EDuration="2.83442644s" podCreationTimestamp="2025-12-06 08:16:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:16:30.833394811 +0000 UTC m=+273.409099879" watchObservedRunningTime="2025-12-06 08:16:30.83442644 +0000 UTC m=+273.410131478" Dec 06 08:16:30 crc kubenswrapper[4763]: I1206 08:16:30.870348 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr"] Dec 06 08:16:31 crc kubenswrapper[4763]: I1206 08:16:31.818662 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" event={"ID":"bc72d97f-b101-4003-a191-e4f95076f7ab","Type":"ContainerStarted","Data":"74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be"} Dec 06 08:16:31 crc kubenswrapper[4763]: I1206 08:16:31.818987 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" 
event={"ID":"bc72d97f-b101-4003-a191-e4f95076f7ab","Type":"ContainerStarted","Data":"d81b1c0c1bff0506596f4443bc33d004d0c474260674387684bac7632001455f"} Dec 06 08:16:31 crc kubenswrapper[4763]: I1206 08:16:31.819217 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:31 crc kubenswrapper[4763]: I1206 08:16:31.840396 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" podStartSLOduration=3.840377221 podStartE2EDuration="3.840377221s" podCreationTimestamp="2025-12-06 08:16:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:16:31.837771427 +0000 UTC m=+274.413476475" watchObservedRunningTime="2025-12-06 08:16:31.840377221 +0000 UTC m=+274.416082249" Dec 06 08:16:31 crc kubenswrapper[4763]: I1206 08:16:31.842429 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:31 crc kubenswrapper[4763]: I1206 08:16:31.935129 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8"] Dec 06 08:16:32 crc kubenswrapper[4763]: I1206 08:16:32.248920 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr"] Dec 06 08:16:33 crc kubenswrapper[4763]: I1206 08:16:33.826347 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" podUID="bc72d97f-b101-4003-a191-e4f95076f7ab" containerName="route-controller-manager" containerID="cri-o://74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be" gracePeriod=30 Dec 06 08:16:33 crc kubenswrapper[4763]: I1206 08:16:33.826453 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" podUID="e60db585-d933-4264-bef1-2fb1ebf2ea90" containerName="controller-manager" containerID="cri-o://55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b" gracePeriod=30 Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.293089 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.298085 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.336243 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969"] Dec 06 08:16:34 crc kubenswrapper[4763]: E1206 08:16:34.336571 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e60db585-d933-4264-bef1-2fb1ebf2ea90" containerName="controller-manager" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.336583 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e60db585-d933-4264-bef1-2fb1ebf2ea90" containerName="controller-manager" Dec 06 08:16:34 crc kubenswrapper[4763]: E1206 08:16:34.336615 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc72d97f-b101-4003-a191-e4f95076f7ab" containerName="route-controller-manager" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.336836 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc72d97f-b101-4003-a191-e4f95076f7ab" containerName="route-controller-manager" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.337157 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e60db585-d933-4264-bef1-2fb1ebf2ea90" containerName="controller-manager" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.337177 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc72d97f-b101-4003-a191-e4f95076f7ab" containerName="route-controller-manager" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.338745 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.351397 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969"] Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.424142 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvlcj\" (UniqueName: \"kubernetes.io/projected/e60db585-d933-4264-bef1-2fb1ebf2ea90-kube-api-access-pvlcj\") pod \"e60db585-d933-4264-bef1-2fb1ebf2ea90\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.424193 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc72d97f-b101-4003-a191-e4f95076f7ab-serving-cert\") pod \"bc72d97f-b101-4003-a191-e4f95076f7ab\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.424219 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-client-ca\") pod \"bc72d97f-b101-4003-a191-e4f95076f7ab\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.424237 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-proxy-ca-bundles\") pod \"e60db585-d933-4264-bef1-2fb1ebf2ea90\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.424260 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhkww\" 
(UniqueName: \"kubernetes.io/projected/bc72d97f-b101-4003-a191-e4f95076f7ab-kube-api-access-fhkww\") pod \"bc72d97f-b101-4003-a191-e4f95076f7ab\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.424282 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-config\") pod \"bc72d97f-b101-4003-a191-e4f95076f7ab\" (UID: \"bc72d97f-b101-4003-a191-e4f95076f7ab\") " Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.424334 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-client-ca\") pod \"e60db585-d933-4264-bef1-2fb1ebf2ea90\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.424352 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-config\") pod \"e60db585-d933-4264-bef1-2fb1ebf2ea90\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.424388 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e60db585-d933-4264-bef1-2fb1ebf2ea90-serving-cert\") pod \"e60db585-d933-4264-bef1-2fb1ebf2ea90\" (UID: \"e60db585-d933-4264-bef1-2fb1ebf2ea90\") " Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.427190 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-client-ca" (OuterVolumeSpecName: "client-ca") pod "bc72d97f-b101-4003-a191-e4f95076f7ab" (UID: "bc72d97f-b101-4003-a191-e4f95076f7ab"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.427652 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-config" (OuterVolumeSpecName: "config") pod "bc72d97f-b101-4003-a191-e4f95076f7ab" (UID: "bc72d97f-b101-4003-a191-e4f95076f7ab"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.431256 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e60db585-d933-4264-bef1-2fb1ebf2ea90-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e60db585-d933-4264-bef1-2fb1ebf2ea90" (UID: "e60db585-d933-4264-bef1-2fb1ebf2ea90"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.431450 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e60db585-d933-4264-bef1-2fb1ebf2ea90" (UID: "e60db585-d933-4264-bef1-2fb1ebf2ea90"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.431597 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc72d97f-b101-4003-a191-e4f95076f7ab-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc72d97f-b101-4003-a191-e4f95076f7ab" (UID: "bc72d97f-b101-4003-a191-e4f95076f7ab"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.433545 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc72d97f-b101-4003-a191-e4f95076f7ab-kube-api-access-fhkww" (OuterVolumeSpecName: "kube-api-access-fhkww") pod "bc72d97f-b101-4003-a191-e4f95076f7ab" (UID: "bc72d97f-b101-4003-a191-e4f95076f7ab"). InnerVolumeSpecName "kube-api-access-fhkww". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.438376 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-config" (OuterVolumeSpecName: "config") pod "e60db585-d933-4264-bef1-2fb1ebf2ea90" (UID: "e60db585-d933-4264-bef1-2fb1ebf2ea90"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.439254 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-client-ca" (OuterVolumeSpecName: "client-ca") pod "e60db585-d933-4264-bef1-2fb1ebf2ea90" (UID: "e60db585-d933-4264-bef1-2fb1ebf2ea90"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.444962 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e60db585-d933-4264-bef1-2fb1ebf2ea90-kube-api-access-pvlcj" (OuterVolumeSpecName: "kube-api-access-pvlcj") pod "e60db585-d933-4264-bef1-2fb1ebf2ea90" (UID: "e60db585-d933-4264-bef1-2fb1ebf2ea90"). InnerVolumeSpecName "kube-api-access-pvlcj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.525880 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-config\") pod \"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.525966 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q544n\" (UniqueName: \"kubernetes.io/projected/b420161e-a60d-4005-ab8b-0f4ecb164e37-kube-api-access-q544n\") pod \"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.525996 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b420161e-a60d-4005-ab8b-0f4ecb164e37-serving-cert\") pod \"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.526016 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-client-ca\") pod \"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.526069 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc72d97f-b101-4003-a191-e4f95076f7ab-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.526085 4763 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.526095 4763 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.526107 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhkww\" (UniqueName: \"kubernetes.io/projected/bc72d97f-b101-4003-a191-e4f95076f7ab-kube-api-access-fhkww\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.526120 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc72d97f-b101-4003-a191-e4f95076f7ab-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.526132 4763 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.526142 4763 reconciler_common.go:293] 
"Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e60db585-d933-4264-bef1-2fb1ebf2ea90-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.526153 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e60db585-d933-4264-bef1-2fb1ebf2ea90-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.526163 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvlcj\" (UniqueName: \"kubernetes.io/projected/e60db585-d933-4264-bef1-2fb1ebf2ea90-kube-api-access-pvlcj\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.627815 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-config\") pod \"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.627885 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q544n\" (UniqueName: \"kubernetes.io/projected/b420161e-a60d-4005-ab8b-0f4ecb164e37-kube-api-access-q544n\") pod \"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.627936 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b420161e-a60d-4005-ab8b-0f4ecb164e37-serving-cert\") pod \"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.627954 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-client-ca\") pod \"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.628837 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-client-ca\") pod \"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.629512 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-config\") pod \"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.632008 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b420161e-a60d-4005-ab8b-0f4ecb164e37-serving-cert\") pod 
\"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.644189 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q544n\" (UniqueName: \"kubernetes.io/projected/b420161e-a60d-4005-ab8b-0f4ecb164e37-kube-api-access-q544n\") pod \"route-controller-manager-6856fbf746-2x969\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.667210 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.833075 4763 generic.go:334] "Generic (PLEG): container finished" podID="bc72d97f-b101-4003-a191-e4f95076f7ab" containerID="74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be" exitCode=0 Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.833120 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.833140 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" event={"ID":"bc72d97f-b101-4003-a191-e4f95076f7ab","Type":"ContainerDied","Data":"74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be"} Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.833531 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr" event={"ID":"bc72d97f-b101-4003-a191-e4f95076f7ab","Type":"ContainerDied","Data":"d81b1c0c1bff0506596f4443bc33d004d0c474260674387684bac7632001455f"} Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.833555 4763 scope.go:117] "RemoveContainer" containerID="74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.835189 4763 generic.go:334] "Generic (PLEG): container finished" podID="e60db585-d933-4264-bef1-2fb1ebf2ea90" containerID="55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b" exitCode=0 Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.835244 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" event={"ID":"e60db585-d933-4264-bef1-2fb1ebf2ea90","Type":"ContainerDied","Data":"55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b"} Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.835261 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" event={"ID":"e60db585-d933-4264-bef1-2fb1ebf2ea90","Type":"ContainerDied","Data":"9549c7886ce4a71b8383db18ac9a96fb8af5be8b5785c981e742b8920cd1ec0c"} Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.835320 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.849675 4763 scope.go:117] "RemoveContainer" containerID="74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be" Dec 06 08:16:34 crc kubenswrapper[4763]: E1206 08:16:34.850245 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be\": container with ID starting with 74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be not found: ID does not exist" containerID="74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.850283 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be"} err="failed to get container status \"74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be\": rpc error: code = NotFound desc = could not find container \"74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be\": container with ID starting with 74aa2bd2e1aa317afbc0167acec6ed7e14d74d59df69e06c91e2369fe29600be not found: ID does not exist" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.850308 4763 scope.go:117] "RemoveContainer" containerID="55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.861818 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr"] Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.867123 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8vvjr"] Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.870950 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8"] Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.874190 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c59b9c6-mvbf8"] Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.878037 4763 scope.go:117] "RemoveContainer" containerID="55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b" Dec 06 08:16:34 crc kubenswrapper[4763]: E1206 08:16:34.878649 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b\": container with ID starting with 55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b not found: ID does not exist" containerID="55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b" Dec 06 08:16:34 crc kubenswrapper[4763]: I1206 08:16:34.878697 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b"} err="failed to get container status \"55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b\": rpc error: code = NotFound desc = could not find container \"55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b\": container with ID starting with 55eddfdd021022bb8176f5b528e46c78fd2dae8c145c2178133686a0ebe6e27b not found: ID does not exist" Dec 06 08:16:35 crc 
kubenswrapper[4763]: I1206 08:16:35.046564 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969"] Dec 06 08:16:35 crc kubenswrapper[4763]: W1206 08:16:35.051546 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb420161e_a60d_4005_ab8b_0f4ecb164e37.slice/crio-d7d3a3b7d9437346fe901a72f2c14c9aa1b1678a23cc12e20055feeefd1e2dcc WatchSource:0}: Error finding container d7d3a3b7d9437346fe901a72f2c14c9aa1b1678a23cc12e20055feeefd1e2dcc: Status 404 returned error can't find the container with id d7d3a3b7d9437346fe901a72f2c14c9aa1b1678a23cc12e20055feeefd1e2dcc Dec 06 08:16:35 crc kubenswrapper[4763]: I1206 08:16:35.725678 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc72d97f-b101-4003-a191-e4f95076f7ab" path="/var/lib/kubelet/pods/bc72d97f-b101-4003-a191-e4f95076f7ab/volumes" Dec 06 08:16:35 crc kubenswrapper[4763]: I1206 08:16:35.726628 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e60db585-d933-4264-bef1-2fb1ebf2ea90" path="/var/lib/kubelet/pods/e60db585-d933-4264-bef1-2fb1ebf2ea90/volumes" Dec 06 08:16:35 crc kubenswrapper[4763]: I1206 08:16:35.842391 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" event={"ID":"b420161e-a60d-4005-ab8b-0f4ecb164e37","Type":"ContainerStarted","Data":"badaabefe24c74f2411b2d6383f02267e1d106c3d66594f51a72f17ccca07271"} Dec 06 08:16:35 crc kubenswrapper[4763]: I1206 08:16:35.842438 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" event={"ID":"b420161e-a60d-4005-ab8b-0f4ecb164e37","Type":"ContainerStarted","Data":"d7d3a3b7d9437346fe901a72f2c14c9aa1b1678a23cc12e20055feeefd1e2dcc"} Dec 06 08:16:35 crc kubenswrapper[4763]: I1206 08:16:35.842623 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:35 crc kubenswrapper[4763]: I1206 08:16:35.848040 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:35 crc kubenswrapper[4763]: I1206 08:16:35.865023 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" podStartSLOduration=3.865003658 podStartE2EDuration="3.865003658s" podCreationTimestamp="2025-12-06 08:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:16:35.862644123 +0000 UTC m=+278.438349161" watchObservedRunningTime="2025-12-06 08:16:35.865003658 +0000 UTC m=+278.440708696" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.172678 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6c5c8764-zqzql"] Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.173322 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.175143 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.175813 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.175841 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.176057 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.177728 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.178432 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.182848 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6c5c8764-zqzql"] Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.183867 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.358081 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fdf8206-597d-43b6-80be-52e1f9c25ac3-serving-cert\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.358125 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxpwm\" (UniqueName: \"kubernetes.io/projected/8fdf8206-597d-43b6-80be-52e1f9c25ac3-kube-api-access-zxpwm\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.358161 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-proxy-ca-bundles\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.358180 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-client-ca\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.358213 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-config\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.459101 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-config\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.459454 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fdf8206-597d-43b6-80be-52e1f9c25ac3-serving-cert\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.459481 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxpwm\" (UniqueName: \"kubernetes.io/projected/8fdf8206-597d-43b6-80be-52e1f9c25ac3-kube-api-access-zxpwm\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.459523 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-proxy-ca-bundles\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.459553 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-client-ca\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.460778 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-client-ca\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.461099 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-config\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.462180 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-proxy-ca-bundles\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc 
kubenswrapper[4763]: I1206 08:16:37.466398 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fdf8206-597d-43b6-80be-52e1f9c25ac3-serving-cert\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.474676 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxpwm\" (UniqueName: \"kubernetes.io/projected/8fdf8206-597d-43b6-80be-52e1f9c25ac3-kube-api-access-zxpwm\") pod \"controller-manager-6c5c8764-zqzql\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.493766 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:37 crc kubenswrapper[4763]: I1206 08:16:37.892826 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6c5c8764-zqzql"] Dec 06 08:16:37 crc kubenswrapper[4763]: W1206 08:16:37.910091 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8fdf8206_597d_43b6_80be_52e1f9c25ac3.slice/crio-d56d53e5c0bb255938ba8d01f9ca0679f4d6674ec0bb9d19bea6f9aa484f0113 WatchSource:0}: Error finding container d56d53e5c0bb255938ba8d01f9ca0679f4d6674ec0bb9d19bea6f9aa484f0113: Status 404 returned error can't find the container with id d56d53e5c0bb255938ba8d01f9ca0679f4d6674ec0bb9d19bea6f9aa484f0113 Dec 06 08:16:38 crc kubenswrapper[4763]: I1206 08:16:38.858453 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" event={"ID":"8fdf8206-597d-43b6-80be-52e1f9c25ac3","Type":"ContainerStarted","Data":"116b5fb77bdf45058a068613a51ff39615ee40854c40e887998604b594e64267"} Dec 06 08:16:38 crc kubenswrapper[4763]: I1206 08:16:38.858770 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" event={"ID":"8fdf8206-597d-43b6-80be-52e1f9c25ac3","Type":"ContainerStarted","Data":"d56d53e5c0bb255938ba8d01f9ca0679f4d6674ec0bb9d19bea6f9aa484f0113"} Dec 06 08:16:38 crc kubenswrapper[4763]: I1206 08:16:38.859239 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:38 crc kubenswrapper[4763]: I1206 08:16:38.863433 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:38 crc kubenswrapper[4763]: I1206 08:16:38.875681 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" podStartSLOduration=7.87566265 podStartE2EDuration="7.87566265s" podCreationTimestamp="2025-12-06 08:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:16:38.872413008 +0000 UTC m=+281.448118046" watchObservedRunningTime="2025-12-06 08:16:38.87566265 +0000 UTC m=+281.451367688" Dec 06 08:16:48 crc kubenswrapper[4763]: I1206 08:16:48.077942 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-controller-manager/controller-manager-6c5c8764-zqzql"] Dec 06 08:16:48 crc kubenswrapper[4763]: I1206 08:16:48.078628 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" podUID="8fdf8206-597d-43b6-80be-52e1f9c25ac3" containerName="controller-manager" containerID="cri-o://116b5fb77bdf45058a068613a51ff39615ee40854c40e887998604b594e64267" gracePeriod=30 Dec 06 08:16:48 crc kubenswrapper[4763]: I1206 08:16:48.094865 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969"] Dec 06 08:16:48 crc kubenswrapper[4763]: I1206 08:16:48.095308 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" podUID="b420161e-a60d-4005-ab8b-0f4ecb164e37" containerName="route-controller-manager" containerID="cri-o://badaabefe24c74f2411b2d6383f02267e1d106c3d66594f51a72f17ccca07271" gracePeriod=30 Dec 06 08:16:49 crc kubenswrapper[4763]: I1206 08:16:49.917489 4763 generic.go:334] "Generic (PLEG): container finished" podID="b420161e-a60d-4005-ab8b-0f4ecb164e37" containerID="badaabefe24c74f2411b2d6383f02267e1d106c3d66594f51a72f17ccca07271" exitCode=0 Dec 06 08:16:49 crc kubenswrapper[4763]: I1206 08:16:49.917592 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" event={"ID":"b420161e-a60d-4005-ab8b-0f4ecb164e37","Type":"ContainerDied","Data":"badaabefe24c74f2411b2d6383f02267e1d106c3d66594f51a72f17ccca07271"} Dec 06 08:16:49 crc kubenswrapper[4763]: I1206 08:16:49.919006 4763 generic.go:334] "Generic (PLEG): container finished" podID="8fdf8206-597d-43b6-80be-52e1f9c25ac3" containerID="116b5fb77bdf45058a068613a51ff39615ee40854c40e887998604b594e64267" exitCode=0 Dec 06 08:16:49 crc kubenswrapper[4763]: I1206 08:16:49.919036 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" event={"ID":"8fdf8206-597d-43b6-80be-52e1f9c25ac3","Type":"ContainerDied","Data":"116b5fb77bdf45058a068613a51ff39615ee40854c40e887998604b594e64267"} Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.143447 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.148179 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.179692 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc"] Dec 06 08:16:51 crc kubenswrapper[4763]: E1206 08:16:51.179915 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b420161e-a60d-4005-ab8b-0f4ecb164e37" containerName="route-controller-manager" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.179930 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="b420161e-a60d-4005-ab8b-0f4ecb164e37" containerName="route-controller-manager" Dec 06 08:16:51 crc kubenswrapper[4763]: E1206 08:16:51.179948 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fdf8206-597d-43b6-80be-52e1f9c25ac3" containerName="controller-manager" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.179954 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fdf8206-597d-43b6-80be-52e1f9c25ac3" containerName="controller-manager" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.180039 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fdf8206-597d-43b6-80be-52e1f9c25ac3" containerName="controller-manager" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.180049 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="b420161e-a60d-4005-ab8b-0f4ecb164e37" containerName="route-controller-manager" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.180384 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.194818 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc"] Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.333296 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-client-ca\") pod \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.333345 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b420161e-a60d-4005-ab8b-0f4ecb164e37-serving-cert\") pod \"b420161e-a60d-4005-ab8b-0f4ecb164e37\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.333375 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-client-ca\") pod \"b420161e-a60d-4005-ab8b-0f4ecb164e37\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.333404 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxpwm\" (UniqueName: \"kubernetes.io/projected/8fdf8206-597d-43b6-80be-52e1f9c25ac3-kube-api-access-zxpwm\") pod \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.333482 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-proxy-ca-bundles\") pod \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.333524 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q544n\" (UniqueName: \"kubernetes.io/projected/b420161e-a60d-4005-ab8b-0f4ecb164e37-kube-api-access-q544n\") pod \"b420161e-a60d-4005-ab8b-0f4ecb164e37\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.333577 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fdf8206-597d-43b6-80be-52e1f9c25ac3-serving-cert\") pod \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.333604 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-config\") pod \"b420161e-a60d-4005-ab8b-0f4ecb164e37\" (UID: \"b420161e-a60d-4005-ab8b-0f4ecb164e37\") " Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.333625 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-config\") pod \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\" (UID: \"8fdf8206-597d-43b6-80be-52e1f9c25ac3\") " Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.333941 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54699d92-cbce-4e19-b102-2c9e7d60c945-serving-cert\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.333973 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn2m9\" (UniqueName: \"kubernetes.io/projected/54699d92-cbce-4e19-b102-2c9e7d60c945-kube-api-access-cn2m9\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.334016 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/54699d92-cbce-4e19-b102-2c9e7d60c945-client-ca\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.334039 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54699d92-cbce-4e19-b102-2c9e7d60c945-config\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.334936 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8fdf8206-597d-43b6-80be-52e1f9c25ac3" (UID: "8fdf8206-597d-43b6-80be-52e1f9c25ac3"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.335383 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-client-ca" (OuterVolumeSpecName: "client-ca") pod "b420161e-a60d-4005-ab8b-0f4ecb164e37" (UID: "b420161e-a60d-4005-ab8b-0f4ecb164e37"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.335597 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-config" (OuterVolumeSpecName: "config") pod "8fdf8206-597d-43b6-80be-52e1f9c25ac3" (UID: "8fdf8206-597d-43b6-80be-52e1f9c25ac3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.336120 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-config" (OuterVolumeSpecName: "config") pod "b420161e-a60d-4005-ab8b-0f4ecb164e37" (UID: "b420161e-a60d-4005-ab8b-0f4ecb164e37"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.336373 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-client-ca" (OuterVolumeSpecName: "client-ca") pod "8fdf8206-597d-43b6-80be-52e1f9c25ac3" (UID: "8fdf8206-597d-43b6-80be-52e1f9c25ac3"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.339750 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b420161e-a60d-4005-ab8b-0f4ecb164e37-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b420161e-a60d-4005-ab8b-0f4ecb164e37" (UID: "b420161e-a60d-4005-ab8b-0f4ecb164e37"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.339802 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fdf8206-597d-43b6-80be-52e1f9c25ac3-kube-api-access-zxpwm" (OuterVolumeSpecName: "kube-api-access-zxpwm") pod "8fdf8206-597d-43b6-80be-52e1f9c25ac3" (UID: "8fdf8206-597d-43b6-80be-52e1f9c25ac3"). InnerVolumeSpecName "kube-api-access-zxpwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.341285 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b420161e-a60d-4005-ab8b-0f4ecb164e37-kube-api-access-q544n" (OuterVolumeSpecName: "kube-api-access-q544n") pod "b420161e-a60d-4005-ab8b-0f4ecb164e37" (UID: "b420161e-a60d-4005-ab8b-0f4ecb164e37"). InnerVolumeSpecName "kube-api-access-q544n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.342036 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fdf8206-597d-43b6-80be-52e1f9c25ac3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8fdf8206-597d-43b6-80be-52e1f9c25ac3" (UID: "8fdf8206-597d-43b6-80be-52e1f9c25ac3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.434950 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54699d92-cbce-4e19-b102-2c9e7d60c945-serving-cert\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435028 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn2m9\" (UniqueName: \"kubernetes.io/projected/54699d92-cbce-4e19-b102-2c9e7d60c945-kube-api-access-cn2m9\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435085 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/54699d92-cbce-4e19-b102-2c9e7d60c945-client-ca\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435119 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54699d92-cbce-4e19-b102-2c9e7d60c945-config\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435172 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxpwm\" (UniqueName: \"kubernetes.io/projected/8fdf8206-597d-43b6-80be-52e1f9c25ac3-kube-api-access-zxpwm\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435189 4763 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435202 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q544n\" (UniqueName: \"kubernetes.io/projected/b420161e-a60d-4005-ab8b-0f4ecb164e37-kube-api-access-q544n\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435217 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fdf8206-597d-43b6-80be-52e1f9c25ac3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435229 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-config\") on node 
\"crc\" DevicePath \"\"" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435242 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435254 4763 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fdf8206-597d-43b6-80be-52e1f9c25ac3-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435266 4763 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b420161e-a60d-4005-ab8b-0f4ecb164e37-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.435276 4763 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b420161e-a60d-4005-ab8b-0f4ecb164e37-client-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.436632 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54699d92-cbce-4e19-b102-2c9e7d60c945-config\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.437749 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/54699d92-cbce-4e19-b102-2c9e7d60c945-client-ca\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.447851 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54699d92-cbce-4e19-b102-2c9e7d60c945-serving-cert\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.472950 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn2m9\" (UniqueName: \"kubernetes.io/projected/54699d92-cbce-4e19-b102-2c9e7d60c945-kube-api-access-cn2m9\") pod \"route-controller-manager-5f4f4c8688-8qshc\" (UID: \"54699d92-cbce-4e19-b102-2c9e7d60c945\") " pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.501273 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.933378 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" event={"ID":"8fdf8206-597d-43b6-80be-52e1f9c25ac3","Type":"ContainerDied","Data":"d56d53e5c0bb255938ba8d01f9ca0679f4d6674ec0bb9d19bea6f9aa484f0113"} Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.933476 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6c5c8764-zqzql" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.934940 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.937444 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc"] Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.937533 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969" event={"ID":"b420161e-a60d-4005-ab8b-0f4ecb164e37","Type":"ContainerDied","Data":"d7d3a3b7d9437346fe901a72f2c14c9aa1b1678a23cc12e20055feeefd1e2dcc"} Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.937599 4763 scope.go:117] "RemoveContainer" containerID="116b5fb77bdf45058a068613a51ff39615ee40854c40e887998604b594e64267" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.965173 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6c5c8764-zqzql"] Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.975046 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6c5c8764-zqzql"] Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.981785 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969"] Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.985261 4763 scope.go:117] "RemoveContainer" containerID="badaabefe24c74f2411b2d6383f02267e1d106c3d66594f51a72f17ccca07271" Dec 06 08:16:51 crc kubenswrapper[4763]: I1206 08:16:51.986590 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6856fbf746-2x969"] Dec 06 08:16:52 crc kubenswrapper[4763]: I1206 08:16:52.946106 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" event={"ID":"54699d92-cbce-4e19-b102-2c9e7d60c945","Type":"ContainerStarted","Data":"d5f6e192f57360b12cadf47eb1d08928d2e5367aeedfc1e86025bfd851e8c9b3"} Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.184140 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm"] Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.185067 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.187595 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.188507 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.188784 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.188870 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.189060 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.189430 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.198656 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.202928 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm"] Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.361054 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-proxy-ca-bundles\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.361290 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-config\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.361417 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-serving-cert\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.361588 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-client-ca\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.361687 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn9x9\" (UniqueName: 
\"kubernetes.io/projected/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-kube-api-access-mn9x9\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.463371 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-config\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.463446 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-serving-cert\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.463539 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-client-ca\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.463584 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn9x9\" (UniqueName: \"kubernetes.io/projected/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-kube-api-access-mn9x9\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.463622 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-proxy-ca-bundles\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.465251 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-client-ca\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.465589 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-proxy-ca-bundles\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.466041 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-config\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " 
pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.474465 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-serving-cert\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.493117 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn9x9\" (UniqueName: \"kubernetes.io/projected/e54a5a45-0f58-4d27-a252-cdfbc0e467ad-kube-api-access-mn9x9\") pod \"controller-manager-7f7c59b9c6-2vsrm\" (UID: \"e54a5a45-0f58-4d27-a252-cdfbc0e467ad\") " pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.509790 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.726532 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fdf8206-597d-43b6-80be-52e1f9c25ac3" path="/var/lib/kubelet/pods/8fdf8206-597d-43b6-80be-52e1f9c25ac3/volumes" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.728587 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b420161e-a60d-4005-ab8b-0f4ecb164e37" path="/var/lib/kubelet/pods/b420161e-a60d-4005-ab8b-0f4ecb164e37/volumes" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.942951 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm"] Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.971200 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" event={"ID":"54699d92-cbce-4e19-b102-2c9e7d60c945","Type":"ContainerStarted","Data":"de31601f632d6b4a04a827fdc9aef9e5637292338ee1b0cbca38e92839ce2af1"} Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.971861 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:53 crc kubenswrapper[4763]: I1206 08:16:53.993325 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" podStartSLOduration=5.993297235 podStartE2EDuration="5.993297235s" podCreationTimestamp="2025-12-06 08:16:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:16:53.990365422 +0000 UTC m=+296.566070480" watchObservedRunningTime="2025-12-06 08:16:53.993297235 +0000 UTC m=+296.569002273" Dec 06 08:16:54 crc kubenswrapper[4763]: I1206 08:16:54.013461 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5f4f4c8688-8qshc" Dec 06 08:16:54 crc kubenswrapper[4763]: I1206 08:16:54.977805 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" event={"ID":"e54a5a45-0f58-4d27-a252-cdfbc0e467ad","Type":"ContainerStarted","Data":"a2b7b0c32e7ff96c73f15610df07ad7b3c18be66f4aaf56ff6f3da90a75409e9"} Dec 06 
08:16:54 crc kubenswrapper[4763]: I1206 08:16:54.978216 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" event={"ID":"e54a5a45-0f58-4d27-a252-cdfbc0e467ad","Type":"ContainerStarted","Data":"08625ce5d7d1d925815f6c83f6986b894530dafccf1648a206e2bac6e0cfc029"} Dec 06 08:16:54 crc kubenswrapper[4763]: I1206 08:16:54.995281 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" podStartSLOduration=6.995262444 podStartE2EDuration="6.995262444s" podCreationTimestamp="2025-12-06 08:16:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:16:54.994753019 +0000 UTC m=+297.570458057" watchObservedRunningTime="2025-12-06 08:16:54.995262444 +0000 UTC m=+297.570967482" Dec 06 08:16:55 crc kubenswrapper[4763]: I1206 08:16:55.983207 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:16:55 crc kubenswrapper[4763]: I1206 08:16:55.987052 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7f7c59b9c6-2vsrm" Dec 06 08:17:42 crc kubenswrapper[4763]: I1206 08:17:42.536934 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:17:42 crc kubenswrapper[4763]: I1206 08:17:42.538095 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:18:12 crc kubenswrapper[4763]: I1206 08:18:12.536347 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:18:12 crc kubenswrapper[4763]: I1206 08:18:12.536925 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:18:42 crc kubenswrapper[4763]: I1206 08:18:42.537050 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:18:42 crc kubenswrapper[4763]: I1206 08:18:42.537578 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" Dec 06 08:18:42 crc kubenswrapper[4763]: I1206 08:18:42.537621 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:18:42 crc kubenswrapper[4763]: I1206 08:18:42.538152 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"934262ae433b5543bf19f01f5875f6e74c9fb7bfaabca2d290eb9294c73853b5"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 08:18:42 crc kubenswrapper[4763]: I1206 08:18:42.538198 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://934262ae433b5543bf19f01f5875f6e74c9fb7bfaabca2d290eb9294c73853b5" gracePeriod=600 Dec 06 08:18:43 crc kubenswrapper[4763]: I1206 08:18:43.572470 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="934262ae433b5543bf19f01f5875f6e74c9fb7bfaabca2d290eb9294c73853b5" exitCode=0 Dec 06 08:18:43 crc kubenswrapper[4763]: I1206 08:18:43.572570 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"934262ae433b5543bf19f01f5875f6e74c9fb7bfaabca2d290eb9294c73853b5"} Dec 06 08:18:43 crc kubenswrapper[4763]: I1206 08:18:43.573412 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"ffbb33058c1c9f7b77a07ef9a3e30aac957b2a1237a5325b0fa61d5ce390660f"} Dec 06 08:18:43 crc kubenswrapper[4763]: I1206 08:18:43.573445 4763 scope.go:117] "RemoveContainer" containerID="a1256d4f047003ba6e45d47bdb93343aa51db47f3443f45b14e5254ef56ca361" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.725481 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-kdpkl"] Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.726868 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-kdpkl" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.729972 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.735063 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-kdpkl"] Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.736561 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.741467 4763 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-5qpsj" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.746095 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-g4fbj"] Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.747422 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-g4fbj" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.751451 4763 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-cbfxd" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.751566 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-z4wp4"] Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.752443 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-z4wp4" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.755785 4763 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-qmrw2" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.757265 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-z4wp4"] Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.764302 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-g4fbj"] Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.848559 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmdgr\" (UniqueName: \"kubernetes.io/projected/ac2a45ed-d601-4f3c-8594-05810a6bbd89-kube-api-access-jmdgr\") pod \"cert-manager-5b446d88c5-g4fbj\" (UID: \"ac2a45ed-d601-4f3c-8594-05810a6bbd89\") " pod="cert-manager/cert-manager-5b446d88c5-g4fbj" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.848620 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzqhx\" (UniqueName: \"kubernetes.io/projected/85545589-5ded-4bd2-a7d8-7ff1a449b321-kube-api-access-kzqhx\") pod \"cert-manager-cainjector-7f985d654d-kdpkl\" (UID: \"85545589-5ded-4bd2-a7d8-7ff1a449b321\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-kdpkl" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.949860 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5x5f\" (UniqueName: \"kubernetes.io/projected/c96e8373-9751-402b-b2bd-d8fc061c18ec-kube-api-access-d5x5f\") pod \"cert-manager-webhook-5655c58dd6-z4wp4\" (UID: \"c96e8373-9751-402b-b2bd-d8fc061c18ec\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-z4wp4" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.949972 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmdgr\" (UniqueName: \"kubernetes.io/projected/ac2a45ed-d601-4f3c-8594-05810a6bbd89-kube-api-access-jmdgr\") pod \"cert-manager-5b446d88c5-g4fbj\" (UID: \"ac2a45ed-d601-4f3c-8594-05810a6bbd89\") " pod="cert-manager/cert-manager-5b446d88c5-g4fbj" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.950006 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzqhx\" (UniqueName: \"kubernetes.io/projected/85545589-5ded-4bd2-a7d8-7ff1a449b321-kube-api-access-kzqhx\") pod \"cert-manager-cainjector-7f985d654d-kdpkl\" (UID: \"85545589-5ded-4bd2-a7d8-7ff1a449b321\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-kdpkl" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.969833 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzqhx\" (UniqueName: 
\"kubernetes.io/projected/85545589-5ded-4bd2-a7d8-7ff1a449b321-kube-api-access-kzqhx\") pod \"cert-manager-cainjector-7f985d654d-kdpkl\" (UID: \"85545589-5ded-4bd2-a7d8-7ff1a449b321\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-kdpkl" Dec 06 08:20:41 crc kubenswrapper[4763]: I1206 08:20:41.969869 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmdgr\" (UniqueName: \"kubernetes.io/projected/ac2a45ed-d601-4f3c-8594-05810a6bbd89-kube-api-access-jmdgr\") pod \"cert-manager-5b446d88c5-g4fbj\" (UID: \"ac2a45ed-d601-4f3c-8594-05810a6bbd89\") " pod="cert-manager/cert-manager-5b446d88c5-g4fbj" Dec 06 08:20:42 crc kubenswrapper[4763]: I1206 08:20:42.043893 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-kdpkl" Dec 06 08:20:42 crc kubenswrapper[4763]: I1206 08:20:42.051129 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5x5f\" (UniqueName: \"kubernetes.io/projected/c96e8373-9751-402b-b2bd-d8fc061c18ec-kube-api-access-d5x5f\") pod \"cert-manager-webhook-5655c58dd6-z4wp4\" (UID: \"c96e8373-9751-402b-b2bd-d8fc061c18ec\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-z4wp4" Dec 06 08:20:42 crc kubenswrapper[4763]: I1206 08:20:42.067647 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-g4fbj" Dec 06 08:20:42 crc kubenswrapper[4763]: I1206 08:20:42.069103 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5x5f\" (UniqueName: \"kubernetes.io/projected/c96e8373-9751-402b-b2bd-d8fc061c18ec-kube-api-access-d5x5f\") pod \"cert-manager-webhook-5655c58dd6-z4wp4\" (UID: \"c96e8373-9751-402b-b2bd-d8fc061c18ec\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-z4wp4" Dec 06 08:20:42 crc kubenswrapper[4763]: I1206 08:20:42.077776 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-z4wp4" Dec 06 08:20:42 crc kubenswrapper[4763]: I1206 08:20:42.299206 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-z4wp4"] Dec 06 08:20:42 crc kubenswrapper[4763]: I1206 08:20:42.321268 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 08:20:42 crc kubenswrapper[4763]: I1206 08:20:42.351871 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-g4fbj"] Dec 06 08:20:42 crc kubenswrapper[4763]: I1206 08:20:42.457379 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-kdpkl"] Dec 06 08:20:42 crc kubenswrapper[4763]: I1206 08:20:42.536738 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:20:42 crc kubenswrapper[4763]: I1206 08:20:42.536796 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:20:43 crc kubenswrapper[4763]: I1206 08:20:43.236354 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-z4wp4" event={"ID":"c96e8373-9751-402b-b2bd-d8fc061c18ec","Type":"ContainerStarted","Data":"8ef832557bd6de9cd8f0c25e9e927ae5c0728c138f51afa7ebf844b7bfb794ed"} Dec 06 08:20:43 crc kubenswrapper[4763]: I1206 08:20:43.240339 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-kdpkl" event={"ID":"85545589-5ded-4bd2-a7d8-7ff1a449b321","Type":"ContainerStarted","Data":"fcadb63e57aafb27ed2cd5f704d1e6994b6309c34f3fb8e07d7549ba1c280a14"} Dec 06 08:20:43 crc kubenswrapper[4763]: I1206 08:20:43.242394 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-g4fbj" event={"ID":"ac2a45ed-d601-4f3c-8594-05810a6bbd89","Type":"ContainerStarted","Data":"559a4895eda4e7ccb24afb280ea5637827079ec86d99f6dd051279bdb3edbbe3"} Dec 06 08:20:47 crc kubenswrapper[4763]: I1206 08:20:47.267758 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-z4wp4" event={"ID":"c96e8373-9751-402b-b2bd-d8fc061c18ec","Type":"ContainerStarted","Data":"3955fee2f425fde52a54b53ba1de58d19f6b407c107c00b54ee47fb91235082a"} Dec 06 08:20:47 crc kubenswrapper[4763]: I1206 08:20:47.274708 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-g4fbj" event={"ID":"ac2a45ed-d601-4f3c-8594-05810a6bbd89","Type":"ContainerStarted","Data":"88d106fef3bb5f30c1ac5cf1c938b248bc1117fc7906dae336caf6c9cb187109"} Dec 06 08:20:47 crc kubenswrapper[4763]: I1206 08:20:47.291754 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-g4fbj" podStartSLOduration=1.542463919 podStartE2EDuration="6.291727441s" podCreationTimestamp="2025-12-06 08:20:41 +0000 UTC" firstStartedPulling="2025-12-06 08:20:42.343757957 +0000 UTC m=+524.919462995" lastFinishedPulling="2025-12-06 
08:20:47.093021469 +0000 UTC m=+529.668726517" observedRunningTime="2025-12-06 08:20:47.289135563 +0000 UTC m=+529.864840621" watchObservedRunningTime="2025-12-06 08:20:47.291727441 +0000 UTC m=+529.867432479" Dec 06 08:20:48 crc kubenswrapper[4763]: I1206 08:20:48.280576 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-kdpkl" event={"ID":"85545589-5ded-4bd2-a7d8-7ff1a449b321","Type":"ContainerStarted","Data":"259ae688682cc7f21a401b3babd71dad6806ac3f8d2cfb2f4670cf5fec14b619"} Dec 06 08:20:48 crc kubenswrapper[4763]: I1206 08:20:48.280773 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-z4wp4" Dec 06 08:20:48 crc kubenswrapper[4763]: I1206 08:20:48.295339 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-z4wp4" podStartSLOduration=2.518814 podStartE2EDuration="7.295319776s" podCreationTimestamp="2025-12-06 08:20:41 +0000 UTC" firstStartedPulling="2025-12-06 08:20:42.321067092 +0000 UTC m=+524.896772130" lastFinishedPulling="2025-12-06 08:20:47.097572868 +0000 UTC m=+529.673277906" observedRunningTime="2025-12-06 08:20:48.293257552 +0000 UTC m=+530.868962600" watchObservedRunningTime="2025-12-06 08:20:48.295319776 +0000 UTC m=+530.871024804" Dec 06 08:20:48 crc kubenswrapper[4763]: I1206 08:20:48.310857 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-kdpkl" podStartSLOduration=1.810503217 podStartE2EDuration="7.310839672s" podCreationTimestamp="2025-12-06 08:20:41 +0000 UTC" firstStartedPulling="2025-12-06 08:20:42.460400851 +0000 UTC m=+525.036105889" lastFinishedPulling="2025-12-06 08:20:47.960737306 +0000 UTC m=+530.536442344" observedRunningTime="2025-12-06 08:20:48.308557713 +0000 UTC m=+530.884262761" watchObservedRunningTime="2025-12-06 08:20:48.310839672 +0000 UTC m=+530.886544710" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.274669 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5lcfn"] Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.275293 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovn-controller" containerID="cri-o://ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660" gracePeriod=30 Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.275354 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56" gracePeriod=30 Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.275374 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="northd" containerID="cri-o://54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255" gracePeriod=30 Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.275446 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="sbdb" 
containerID="cri-o://86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" gracePeriod=30 Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.275399 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="kube-rbac-proxy-node" containerID="cri-o://be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a" gracePeriod=30 Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.275576 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="nbdb" containerID="cri-o://4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" gracePeriod=30 Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.275555 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovn-acl-logging" containerID="cri-o://719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29" gracePeriod=30 Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.302380 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovnkube-controller" containerID="cri-o://1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb" gracePeriod=30 Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.457655 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09 is running failed: container process not found" containerID="86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.458282 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09 is running failed: container process not found" containerID="86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.458498 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.458779 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09 is running failed: container process not found" containerID="86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" cmd=["/bin/bash","-c","set -xeo pipefail\n. 
/ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.458822 4763 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09 is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="sbdb" Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.458954 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9 is running failed: container process not found" containerID="4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.459229 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9 is running failed: container process not found" containerID="4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.459277 4763 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9 is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="nbdb" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.550957 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5lcfn_45ef29bb-34fc-400a-93f2-c75d9470c9b8/ovn-acl-logging/0.log" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.551567 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5lcfn_45ef29bb-34fc-400a-93f2-c75d9470c9b8/ovn-controller/0.log" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.551999 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.604547 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hj5xh"] Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.604753 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovn-acl-logging" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.604768 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovn-acl-logging" Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.604781 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="kube-rbac-proxy-node" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.604786 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="kube-rbac-proxy-node" Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.604802 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovn-controller" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.604809 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovn-controller" Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.604816 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovnkube-controller" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.604822 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovnkube-controller" Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.604831 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="kubecfg-setup" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.604836 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="kubecfg-setup" Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.604848 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="northd" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.604854 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="northd" Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.604863 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="nbdb" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.604869 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="nbdb" Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.604877 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="sbdb" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.604884 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="sbdb" Dec 06 08:20:51 crc kubenswrapper[4763]: E1206 08:20:51.604912 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" 
containerName="kube-rbac-proxy-ovn-metrics" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.604919 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="kube-rbac-proxy-ovn-metrics" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.605006 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovn-acl-logging" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.605017 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovn-controller" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.605025 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="sbdb" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.605030 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="kube-rbac-proxy-node" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.605037 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="kube-rbac-proxy-ovn-metrics" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.605044 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="nbdb" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.605054 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="ovnkube-controller" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.605061 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerName="northd" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.606691 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674112 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-netns\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674159 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-ovn\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674219 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-bin\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674247 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-netd\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674270 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-env-overrides\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674284 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-log-socket\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674308 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-ovn-kubernetes\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674323 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-var-lib-openvswitch\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674344 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6m6d\" (UniqueName: \"kubernetes.io/projected/45ef29bb-34fc-400a-93f2-c75d9470c9b8-kube-api-access-z6m6d\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674356 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-kubelet\") pod 
\"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674369 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-openvswitch\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674387 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-slash\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674415 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-etc-openvswitch\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674438 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-systemd\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674458 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674475 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-config\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674492 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-script-lib\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674514 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-node-log\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674539 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-systemd-units\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.674557 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovn-node-metrics-cert\") pod \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\" (UID: \"45ef29bb-34fc-400a-93f2-c75d9470c9b8\") " Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675283 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675311 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675356 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675374 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675391 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675614 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675661 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-slash" (OuterVolumeSpecName: "host-slash") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675678 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675707 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675730 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-log-socket" (OuterVolumeSpecName: "log-socket") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675746 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.675764 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676305 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676332 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-node-log" (OuterVolumeSpecName: "node-log") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676405 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676444 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdnqb\" (UniqueName: \"kubernetes.io/projected/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-kube-api-access-sdnqb\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676469 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676476 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-run-systemd\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676540 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-node-log\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676593 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-var-lib-openvswitch\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676614 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-run-ovn-kubernetes\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676632 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-ovn-node-metrics-cert\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676648 4763 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-systemd-units\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676713 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676722 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-etc-openvswitch\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676801 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-cni-bin\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676831 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-run-openvswitch\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676872 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-log-socket\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676919 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-ovnkube-script-lib\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.676965 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-slash\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677162 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-run-netns\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677205 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-cni-netd\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677229 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-ovnkube-config\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677252 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-env-overrides\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677287 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-kubelet\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677312 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677379 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-run-ovn\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677458 4763 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677469 4763 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677477 4763 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-log-socket\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677487 4763 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-ovn-kubernetes\") on node 
\"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677496 4763 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677504 4763 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677512 4763 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677520 4763 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-slash\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677529 4763 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677537 4763 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677548 4763 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677558 4763 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677566 4763 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-node-log\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677574 4763 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677581 4763 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677589 4763 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.677599 4763 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 
crc kubenswrapper[4763]: I1206 08:20:51.680535 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45ef29bb-34fc-400a-93f2-c75d9470c9b8-kube-api-access-z6m6d" (OuterVolumeSpecName: "kube-api-access-z6m6d") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "kube-api-access-z6m6d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.680719 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.690037 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "45ef29bb-34fc-400a-93f2-c75d9470c9b8" (UID: "45ef29bb-34fc-400a-93f2-c75d9470c9b8"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779557 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-slash\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779633 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-run-netns\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779657 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-cni-netd\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779688 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-ovnkube-config\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779710 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-env-overrides\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779708 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-slash\") pod \"ovnkube-node-hj5xh\" (UID: 
\"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779738 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-kubelet\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779786 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-cni-netd\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779794 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779846 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779891 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-run-netns\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779933 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-run-ovn\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779973 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdnqb\" (UniqueName: \"kubernetes.io/projected/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-kube-api-access-sdnqb\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780061 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-run-ovn\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780378 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-env-overrides\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.779943 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-kubelet\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780438 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-ovnkube-config\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780700 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-run-systemd\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780706 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-run-systemd\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780751 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-node-log\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780784 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-var-lib-openvswitch\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780812 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-run-ovn-kubernetes\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780817 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-node-log\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780865 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-ovn-node-metrics-cert\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780915 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-systemd-units\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780945 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-etc-openvswitch\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780979 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-systemd-units\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.780914 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-var-lib-openvswitch\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781012 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-cni-bin\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781043 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-run-openvswitch\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781077 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-log-socket\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781099 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-ovnkube-script-lib\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781039 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-etc-openvswitch\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781206 4763 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/45ef29bb-34fc-400a-93f2-c75d9470c9b8-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781220 4763 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/45ef29bb-34fc-400a-93f2-c75d9470c9b8-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781232 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6m6d\" (UniqueName: \"kubernetes.io/projected/45ef29bb-34fc-400a-93f2-c75d9470c9b8-kube-api-access-z6m6d\") on node \"crc\" DevicePath \"\"" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781258 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-cni-bin\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781263 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-log-socket\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781368 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-run-openvswitch\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781416 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-host-run-ovn-kubernetes\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.781669 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-ovnkube-script-lib\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.783548 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-ovn-node-metrics-cert\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.795259 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdnqb\" (UniqueName: \"kubernetes.io/projected/ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1-kube-api-access-sdnqb\") pod \"ovnkube-node-hj5xh\" (UID: \"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1\") " pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: I1206 08:20:51.924435 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:51 crc kubenswrapper[4763]: W1206 08:20:51.945106 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad5effbb_97e3_498b_ab5b_6dd69fb3f5d1.slice/crio-e15cb1762f46802bd8241b5f94f22534a8f9c594ab35b2186415cd247853b409 WatchSource:0}: Error finding container e15cb1762f46802bd8241b5f94f22534a8f9c594ab35b2186415cd247853b409: Status 404 returned error can't find the container with id e15cb1762f46802bd8241b5f94f22534a8f9c594ab35b2186415cd247853b409 Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.082538 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-z4wp4" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.300852 4763 generic.go:334] "Generic (PLEG): container finished" podID="ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1" containerID="7d8b50520f46bbc8fd7e611841e156bfc7a0c45288e37fa04e008616c4698c5c" exitCode=0 Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.301510 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" event={"ID":"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1","Type":"ContainerDied","Data":"7d8b50520f46bbc8fd7e611841e156bfc7a0c45288e37fa04e008616c4698c5c"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.301680 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" event={"ID":"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1","Type":"ContainerStarted","Data":"e15cb1762f46802bd8241b5f94f22534a8f9c594ab35b2186415cd247853b409"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.303226 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm2pv_8dc562e3-8b35-4486-8731-dc26218daf86/kube-multus/0.log" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.303284 4763 generic.go:334] "Generic (PLEG): container finished" podID="8dc562e3-8b35-4486-8731-dc26218daf86" containerID="2306e1b8490b106c47ba5d9c90a11e5123ee0a441a0f62b347eafd32c269e9d6" exitCode=2 Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.303376 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm2pv" event={"ID":"8dc562e3-8b35-4486-8731-dc26218daf86","Type":"ContainerDied","Data":"2306e1b8490b106c47ba5d9c90a11e5123ee0a441a0f62b347eafd32c269e9d6"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.304063 4763 scope.go:117] "RemoveContainer" containerID="2306e1b8490b106c47ba5d9c90a11e5123ee0a441a0f62b347eafd32c269e9d6" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.311635 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5lcfn_45ef29bb-34fc-400a-93f2-c75d9470c9b8/ovn-acl-logging/0.log" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312213 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5lcfn_45ef29bb-34fc-400a-93f2-c75d9470c9b8/ovn-controller/0.log" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312565 4763 generic.go:334] "Generic (PLEG): container finished" podID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerID="1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb" exitCode=0 Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312718 4763 generic.go:334] "Generic (PLEG): container finished" podID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" 
containerID="86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" exitCode=0 Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312729 4763 generic.go:334] "Generic (PLEG): container finished" podID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerID="4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" exitCode=0 Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312735 4763 generic.go:334] "Generic (PLEG): container finished" podID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerID="54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255" exitCode=0 Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312743 4763 generic.go:334] "Generic (PLEG): container finished" podID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerID="ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56" exitCode=0 Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312750 4763 generic.go:334] "Generic (PLEG): container finished" podID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerID="be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a" exitCode=0 Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312756 4763 generic.go:334] "Generic (PLEG): container finished" podID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerID="719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29" exitCode=143 Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312763 4763 generic.go:334] "Generic (PLEG): container finished" podID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" containerID="ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660" exitCode=143 Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312703 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312643 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerDied","Data":"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312977 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerDied","Data":"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.312993 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerDied","Data":"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313003 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerDied","Data":"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313014 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerDied","Data":"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313026 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerDied","Data":"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313037 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313046 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313052 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313059 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerDied","Data":"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313066 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313073 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313078 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313083 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313088 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313093 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313098 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313102 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313107 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 
08:20:52.313114 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerDied","Data":"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313122 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313129 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313134 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313138 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313143 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313148 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313153 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313158 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313162 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313169 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5lcfn" event={"ID":"45ef29bb-34fc-400a-93f2-c75d9470c9b8","Type":"ContainerDied","Data":"06a74ed727c8e7de3a1553d52aa00788badbf51bb62410c10d6e4089437ab9c7"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313177 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313183 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313188 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313193 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313198 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313203 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313208 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313212 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313217 4763 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f"} Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.313231 4763 scope.go:117] "RemoveContainer" containerID="1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.338327 4763 scope.go:117] "RemoveContainer" containerID="86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.359049 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5lcfn"] Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.364466 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5lcfn"] Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.374219 4763 scope.go:117] "RemoveContainer" containerID="4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.390082 4763 scope.go:117] "RemoveContainer" containerID="54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.422504 4763 scope.go:117] "RemoveContainer" containerID="ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.440920 4763 scope.go:117] "RemoveContainer" containerID="be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.453413 4763 scope.go:117] "RemoveContainer" containerID="719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.464608 4763 scope.go:117] "RemoveContainer" containerID="ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.478176 4763 scope.go:117] "RemoveContainer" containerID="0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f" Dec 06 
08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.489158 4763 scope.go:117] "RemoveContainer" containerID="1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb" Dec 06 08:20:52 crc kubenswrapper[4763]: E1206 08:20:52.489959 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb\": container with ID starting with 1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb not found: ID does not exist" containerID="1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.489988 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb"} err="failed to get container status \"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb\": rpc error: code = NotFound desc = could not find container \"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb\": container with ID starting with 1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.490009 4763 scope.go:117] "RemoveContainer" containerID="86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" Dec 06 08:20:52 crc kubenswrapper[4763]: E1206 08:20:52.490314 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09\": container with ID starting with 86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09 not found: ID does not exist" containerID="86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.490361 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09"} err="failed to get container status \"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09\": rpc error: code = NotFound desc = could not find container \"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09\": container with ID starting with 86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.490394 4763 scope.go:117] "RemoveContainer" containerID="4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" Dec 06 08:20:52 crc kubenswrapper[4763]: E1206 08:20:52.490712 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9\": container with ID starting with 4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9 not found: ID does not exist" containerID="4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.490740 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9"} err="failed to get container status \"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9\": rpc error: code = NotFound desc = could not find container 
\"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9\": container with ID starting with 4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.490755 4763 scope.go:117] "RemoveContainer" containerID="54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255" Dec 06 08:20:52 crc kubenswrapper[4763]: E1206 08:20:52.491236 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255\": container with ID starting with 54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255 not found: ID does not exist" containerID="54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.491254 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255"} err="failed to get container status \"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255\": rpc error: code = NotFound desc = could not find container \"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255\": container with ID starting with 54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.491267 4763 scope.go:117] "RemoveContainer" containerID="ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56" Dec 06 08:20:52 crc kubenswrapper[4763]: E1206 08:20:52.491599 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56\": container with ID starting with ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56 not found: ID does not exist" containerID="ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.491626 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56"} err="failed to get container status \"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56\": rpc error: code = NotFound desc = could not find container \"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56\": container with ID starting with ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.491644 4763 scope.go:117] "RemoveContainer" containerID="be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a" Dec 06 08:20:52 crc kubenswrapper[4763]: E1206 08:20:52.491907 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a\": container with ID starting with be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a not found: ID does not exist" containerID="be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.491927 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a"} 
err="failed to get container status \"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a\": rpc error: code = NotFound desc = could not find container \"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a\": container with ID starting with be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.491939 4763 scope.go:117] "RemoveContainer" containerID="719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29" Dec 06 08:20:52 crc kubenswrapper[4763]: E1206 08:20:52.492163 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29\": container with ID starting with 719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29 not found: ID does not exist" containerID="719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.492185 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29"} err="failed to get container status \"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29\": rpc error: code = NotFound desc = could not find container \"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29\": container with ID starting with 719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.492199 4763 scope.go:117] "RemoveContainer" containerID="ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660" Dec 06 08:20:52 crc kubenswrapper[4763]: E1206 08:20:52.492500 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660\": container with ID starting with ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660 not found: ID does not exist" containerID="ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.492517 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660"} err="failed to get container status \"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660\": rpc error: code = NotFound desc = could not find container \"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660\": container with ID starting with ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.492528 4763 scope.go:117] "RemoveContainer" containerID="0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f" Dec 06 08:20:52 crc kubenswrapper[4763]: E1206 08:20:52.492834 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f\": container with ID starting with 0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f not found: ID does not exist" containerID="0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.492863 4763 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f"} err="failed to get container status \"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f\": rpc error: code = NotFound desc = could not find container \"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f\": container with ID starting with 0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.492880 4763 scope.go:117] "RemoveContainer" containerID="1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.493173 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb"} err="failed to get container status \"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb\": rpc error: code = NotFound desc = could not find container \"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb\": container with ID starting with 1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.493202 4763 scope.go:117] "RemoveContainer" containerID="86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.493408 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09"} err="failed to get container status \"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09\": rpc error: code = NotFound desc = could not find container \"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09\": container with ID starting with 86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.493441 4763 scope.go:117] "RemoveContainer" containerID="4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.493651 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9"} err="failed to get container status \"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9\": rpc error: code = NotFound desc = could not find container \"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9\": container with ID starting with 4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.493681 4763 scope.go:117] "RemoveContainer" containerID="54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.494009 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255"} err="failed to get container status \"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255\": rpc error: code = NotFound desc = could not find container \"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255\": container with ID starting with 
54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.494041 4763 scope.go:117] "RemoveContainer" containerID="ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.494355 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56"} err="failed to get container status \"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56\": rpc error: code = NotFound desc = could not find container \"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56\": container with ID starting with ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.494374 4763 scope.go:117] "RemoveContainer" containerID="be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.494593 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a"} err="failed to get container status \"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a\": rpc error: code = NotFound desc = could not find container \"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a\": container with ID starting with be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.494612 4763 scope.go:117] "RemoveContainer" containerID="719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.494796 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29"} err="failed to get container status \"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29\": rpc error: code = NotFound desc = could not find container \"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29\": container with ID starting with 719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.494816 4763 scope.go:117] "RemoveContainer" containerID="ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.494986 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660"} err="failed to get container status \"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660\": rpc error: code = NotFound desc = could not find container \"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660\": container with ID starting with ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.495004 4763 scope.go:117] "RemoveContainer" containerID="0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.495199 4763 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f"} err="failed to get container status \"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f\": rpc error: code = NotFound desc = could not find container \"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f\": container with ID starting with 0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.495219 4763 scope.go:117] "RemoveContainer" containerID="1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.495641 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb"} err="failed to get container status \"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb\": rpc error: code = NotFound desc = could not find container \"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb\": container with ID starting with 1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.495657 4763 scope.go:117] "RemoveContainer" containerID="86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.496112 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09"} err="failed to get container status \"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09\": rpc error: code = NotFound desc = could not find container \"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09\": container with ID starting with 86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.496131 4763 scope.go:117] "RemoveContainer" containerID="4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.496399 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9"} err="failed to get container status \"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9\": rpc error: code = NotFound desc = could not find container \"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9\": container with ID starting with 4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.496427 4763 scope.go:117] "RemoveContainer" containerID="54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.496717 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255"} err="failed to get container status \"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255\": rpc error: code = NotFound desc = could not find container \"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255\": container with ID starting with 54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255 not found: ID does not exist" Dec 
06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.496733 4763 scope.go:117] "RemoveContainer" containerID="ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.497067 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56"} err="failed to get container status \"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56\": rpc error: code = NotFound desc = could not find container \"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56\": container with ID starting with ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.497096 4763 scope.go:117] "RemoveContainer" containerID="be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.497435 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a"} err="failed to get container status \"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a\": rpc error: code = NotFound desc = could not find container \"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a\": container with ID starting with be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.497452 4763 scope.go:117] "RemoveContainer" containerID="719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.497750 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29"} err="failed to get container status \"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29\": rpc error: code = NotFound desc = could not find container \"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29\": container with ID starting with 719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.497767 4763 scope.go:117] "RemoveContainer" containerID="ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.498079 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660"} err="failed to get container status \"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660\": rpc error: code = NotFound desc = could not find container \"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660\": container with ID starting with ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.498105 4763 scope.go:117] "RemoveContainer" containerID="0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.498425 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f"} err="failed to get container status 
\"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f\": rpc error: code = NotFound desc = could not find container \"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f\": container with ID starting with 0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.498443 4763 scope.go:117] "RemoveContainer" containerID="1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.498694 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb"} err="failed to get container status \"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb\": rpc error: code = NotFound desc = could not find container \"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb\": container with ID starting with 1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.498709 4763 scope.go:117] "RemoveContainer" containerID="86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.498986 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09"} err="failed to get container status \"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09\": rpc error: code = NotFound desc = could not find container \"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09\": container with ID starting with 86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.499003 4763 scope.go:117] "RemoveContainer" containerID="4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.499267 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9"} err="failed to get container status \"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9\": rpc error: code = NotFound desc = could not find container \"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9\": container with ID starting with 4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.499283 4763 scope.go:117] "RemoveContainer" containerID="54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.499611 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255"} err="failed to get container status \"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255\": rpc error: code = NotFound desc = could not find container \"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255\": container with ID starting with 54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.499636 4763 scope.go:117] "RemoveContainer" 
containerID="ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.499941 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56"} err="failed to get container status \"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56\": rpc error: code = NotFound desc = could not find container \"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56\": container with ID starting with ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.499987 4763 scope.go:117] "RemoveContainer" containerID="be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.500242 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a"} err="failed to get container status \"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a\": rpc error: code = NotFound desc = could not find container \"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a\": container with ID starting with be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.500261 4763 scope.go:117] "RemoveContainer" containerID="719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.500528 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29"} err="failed to get container status \"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29\": rpc error: code = NotFound desc = could not find container \"719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29\": container with ID starting with 719c367c29e15bbc09332fcb78866eb1d2e66bbacf2394f561ec7c0c29c2fe29 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.500544 4763 scope.go:117] "RemoveContainer" containerID="ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.500724 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660"} err="failed to get container status \"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660\": rpc error: code = NotFound desc = could not find container \"ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660\": container with ID starting with ac39022903924c6342d8a919b91313c4618182d407ebb9bf185fcd13bfd8b660 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.500746 4763 scope.go:117] "RemoveContainer" containerID="0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.501066 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f"} err="failed to get container status \"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f\": rpc error: code = NotFound desc = could not find 
container \"0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f\": container with ID starting with 0e3608b77a11a481607b7126b9ca7c95767b3a3116ba2e94017bc6798cd3773f not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.501084 4763 scope.go:117] "RemoveContainer" containerID="1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.501300 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb"} err="failed to get container status \"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb\": rpc error: code = NotFound desc = could not find container \"1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb\": container with ID starting with 1adcd6140b91de607ef6932d2eec4297298384e690c38fec67089e272e5fe5fb not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.501324 4763 scope.go:117] "RemoveContainer" containerID="86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.501683 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09"} err="failed to get container status \"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09\": rpc error: code = NotFound desc = could not find container \"86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09\": container with ID starting with 86cebad0fc1210096de4fcc0fa0e95f18d78f2f2f423b46963cb3fffe5d6ee09 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.501700 4763 scope.go:117] "RemoveContainer" containerID="4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.501970 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9"} err="failed to get container status \"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9\": rpc error: code = NotFound desc = could not find container \"4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9\": container with ID starting with 4ed9c634e89f1eaef06fb5dd5eab8e9a07e0c50a06a8b1cc7df353978aa540a9 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.501987 4763 scope.go:117] "RemoveContainer" containerID="54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.502288 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255"} err="failed to get container status \"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255\": rpc error: code = NotFound desc = could not find container \"54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255\": container with ID starting with 54d6d83a3fd5ec75c5df1c9c85aca39d22a04f120332e4f62172c96366276255 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.502306 4763 scope.go:117] "RemoveContainer" containerID="ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.502667 4763 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56"} err="failed to get container status \"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56\": rpc error: code = NotFound desc = could not find container \"ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56\": container with ID starting with ea7cedba7991772b051b2d69c228a0c45b8881abb86c4132275b29cdc9d78f56 not found: ID does not exist" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.502693 4763 scope.go:117] "RemoveContainer" containerID="be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a" Dec 06 08:20:52 crc kubenswrapper[4763]: I1206 08:20:52.502979 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a"} err="failed to get container status \"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a\": rpc error: code = NotFound desc = could not find container \"be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a\": container with ID starting with be5e37ccbbc084db36b0d52ad0995d2c364f1680bfc85f6570ad7dd7b9352d4a not found: ID does not exist" Dec 06 08:20:53 crc kubenswrapper[4763]: I1206 08:20:53.319624 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vm2pv_8dc562e3-8b35-4486-8731-dc26218daf86/kube-multus/0.log" Dec 06 08:20:53 crc kubenswrapper[4763]: I1206 08:20:53.319960 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vm2pv" event={"ID":"8dc562e3-8b35-4486-8731-dc26218daf86","Type":"ContainerStarted","Data":"709fa3f14d21fa4d0e09710b3b6f53d67f239de1e5145c05b870ae1cd629a3bc"} Dec 06 08:20:53 crc kubenswrapper[4763]: I1206 08:20:53.326718 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" event={"ID":"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1","Type":"ContainerStarted","Data":"2adb8fe7060da757d8e0663c6a3bcd7f261b1b97089546d83765178b9a17b606"} Dec 06 08:20:53 crc kubenswrapper[4763]: I1206 08:20:53.327091 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" event={"ID":"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1","Type":"ContainerStarted","Data":"94fb89e04fa9a6e78e211d1d8f992f8060d7f2fb11c4111961ee73aa000b2380"} Dec 06 08:20:53 crc kubenswrapper[4763]: I1206 08:20:53.327109 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" event={"ID":"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1","Type":"ContainerStarted","Data":"c9e0156d19527aecd976a199da08170d736aa01dcb735bde7134aecaea1c5c31"} Dec 06 08:20:53 crc kubenswrapper[4763]: I1206 08:20:53.327119 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" event={"ID":"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1","Type":"ContainerStarted","Data":"abec775884e01213f9c9c6727cb22450c4795c65ead49cf91cfd6cf6dfa1e610"} Dec 06 08:20:53 crc kubenswrapper[4763]: I1206 08:20:53.327129 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" event={"ID":"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1","Type":"ContainerStarted","Data":"5b9d95c99565c580e862e689a149f1b8eb2a78e215babc123f1d7a60860e4398"} Dec 06 08:20:53 crc kubenswrapper[4763]: I1206 08:20:53.327140 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" event={"ID":"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1","Type":"ContainerStarted","Data":"31b845371607006e1b482d0f7f40dd8716a1a5629a546e57bd9c852eff9b8879"} Dec 06 08:20:53 crc kubenswrapper[4763]: I1206 08:20:53.727702 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45ef29bb-34fc-400a-93f2-c75d9470c9b8" path="/var/lib/kubelet/pods/45ef29bb-34fc-400a-93f2-c75d9470c9b8/volumes" Dec 06 08:20:56 crc kubenswrapper[4763]: I1206 08:20:56.346283 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" event={"ID":"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1","Type":"ContainerStarted","Data":"919184ec4069f83c39f81f8ef54827a29aed2a381c210130e0710de845dc20af"} Dec 06 08:20:58 crc kubenswrapper[4763]: I1206 08:20:58.360308 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" event={"ID":"ad5effbb-97e3-498b-ab5b-6dd69fb3f5d1","Type":"ContainerStarted","Data":"77bc77d354ea63964221d4ff82939636ed61acc5b5ad1e0488d687f9c78bdf75"} Dec 06 08:20:58 crc kubenswrapper[4763]: I1206 08:20:58.360886 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:58 crc kubenswrapper[4763]: I1206 08:20:58.360918 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:58 crc kubenswrapper[4763]: I1206 08:20:58.393795 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" podStartSLOduration=7.393769548 podStartE2EDuration="7.393769548s" podCreationTimestamp="2025-12-06 08:20:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:20:58.386340322 +0000 UTC m=+540.962045400" watchObservedRunningTime="2025-12-06 08:20:58.393769548 +0000 UTC m=+540.969474596" Dec 06 08:20:58 crc kubenswrapper[4763]: I1206 08:20:58.396296 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:59 crc kubenswrapper[4763]: I1206 08:20:59.365231 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:20:59 crc kubenswrapper[4763]: I1206 08:20:59.442016 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:21:12 crc kubenswrapper[4763]: I1206 08:21:12.536995 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:21:12 crc kubenswrapper[4763]: I1206 08:21:12.537641 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:21:21 crc kubenswrapper[4763]: I1206 08:21:21.946274 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hj5xh" Dec 06 08:21:25 crc 
kubenswrapper[4763]: I1206 08:21:25.645687 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm"] Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.647379 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.650034 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.663191 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm"] Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.828561 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72sjg\" (UniqueName: \"kubernetes.io/projected/af427750-c93a-4698-bc6b-a73202bdfeb7-kube-api-access-72sjg\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.828768 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.828877 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.929881 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72sjg\" (UniqueName: \"kubernetes.io/projected/af427750-c93a-4698-bc6b-a73202bdfeb7-kube-api-access-72sjg\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.929976 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.930012 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") 
" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.930439 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.930476 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.948660 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72sjg\" (UniqueName: \"kubernetes.io/projected/af427750-c93a-4698-bc6b-a73202bdfeb7-kube-api-access-72sjg\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:25 crc kubenswrapper[4763]: I1206 08:21:25.964150 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:26 crc kubenswrapper[4763]: I1206 08:21:26.365324 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm"] Dec 06 08:21:26 crc kubenswrapper[4763]: I1206 08:21:26.511041 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" event={"ID":"af427750-c93a-4698-bc6b-a73202bdfeb7","Type":"ContainerStarted","Data":"dc551dfa04ee739002ec17781bb84f7804103fa98cefde137af3e9eec7f6a9c9"} Dec 06 08:21:27 crc kubenswrapper[4763]: I1206 08:21:27.517309 4763 generic.go:334] "Generic (PLEG): container finished" podID="af427750-c93a-4698-bc6b-a73202bdfeb7" containerID="04e66dedfea66c7250245f063b3ba121fe082ccbe6ec43150d7ca20484daf012" exitCode=0 Dec 06 08:21:27 crc kubenswrapper[4763]: I1206 08:21:27.517366 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" event={"ID":"af427750-c93a-4698-bc6b-a73202bdfeb7","Type":"ContainerDied","Data":"04e66dedfea66c7250245f063b3ba121fe082ccbe6ec43150d7ca20484daf012"} Dec 06 08:21:33 crc kubenswrapper[4763]: I1206 08:21:33.552313 4763 generic.go:334] "Generic (PLEG): container finished" podID="af427750-c93a-4698-bc6b-a73202bdfeb7" containerID="7c96ace5696eae31cb2aa0571b9926c39ed3d578b7820bd536270df2da0ed2ac" exitCode=0 Dec 06 08:21:33 crc kubenswrapper[4763]: I1206 08:21:33.552355 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" event={"ID":"af427750-c93a-4698-bc6b-a73202bdfeb7","Type":"ContainerDied","Data":"7c96ace5696eae31cb2aa0571b9926c39ed3d578b7820bd536270df2da0ed2ac"} Dec 06 08:21:34 crc kubenswrapper[4763]: I1206 
08:21:34.560718 4763 generic.go:334] "Generic (PLEG): container finished" podID="af427750-c93a-4698-bc6b-a73202bdfeb7" containerID="757ba0b75241c5be0bfa142fb32b65f87ec0152bd2e3a6f3c8ff20dce67f4bbb" exitCode=0 Dec 06 08:21:34 crc kubenswrapper[4763]: I1206 08:21:34.560804 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" event={"ID":"af427750-c93a-4698-bc6b-a73202bdfeb7","Type":"ContainerDied","Data":"757ba0b75241c5be0bfa142fb32b65f87ec0152bd2e3a6f3c8ff20dce67f4bbb"} Dec 06 08:21:35 crc kubenswrapper[4763]: I1206 08:21:35.780890 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:35 crc kubenswrapper[4763]: I1206 08:21:35.954274 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72sjg\" (UniqueName: \"kubernetes.io/projected/af427750-c93a-4698-bc6b-a73202bdfeb7-kube-api-access-72sjg\") pod \"af427750-c93a-4698-bc6b-a73202bdfeb7\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") " Dec 06 08:21:35 crc kubenswrapper[4763]: I1206 08:21:35.954333 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-bundle\") pod \"af427750-c93a-4698-bc6b-a73202bdfeb7\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") " Dec 06 08:21:35 crc kubenswrapper[4763]: I1206 08:21:35.954348 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-util\") pod \"af427750-c93a-4698-bc6b-a73202bdfeb7\" (UID: \"af427750-c93a-4698-bc6b-a73202bdfeb7\") " Dec 06 08:21:35 crc kubenswrapper[4763]: I1206 08:21:35.956360 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-bundle" (OuterVolumeSpecName: "bundle") pod "af427750-c93a-4698-bc6b-a73202bdfeb7" (UID: "af427750-c93a-4698-bc6b-a73202bdfeb7"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:21:35 crc kubenswrapper[4763]: I1206 08:21:35.959742 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af427750-c93a-4698-bc6b-a73202bdfeb7-kube-api-access-72sjg" (OuterVolumeSpecName: "kube-api-access-72sjg") pod "af427750-c93a-4698-bc6b-a73202bdfeb7" (UID: "af427750-c93a-4698-bc6b-a73202bdfeb7"). InnerVolumeSpecName "kube-api-access-72sjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:21:35 crc kubenswrapper[4763]: I1206 08:21:35.966297 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-util" (OuterVolumeSpecName: "util") pod "af427750-c93a-4698-bc6b-a73202bdfeb7" (UID: "af427750-c93a-4698-bc6b-a73202bdfeb7"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:21:36 crc kubenswrapper[4763]: I1206 08:21:36.056267 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72sjg\" (UniqueName: \"kubernetes.io/projected/af427750-c93a-4698-bc6b-a73202bdfeb7-kube-api-access-72sjg\") on node \"crc\" DevicePath \"\"" Dec 06 08:21:36 crc kubenswrapper[4763]: I1206 08:21:36.056301 4763 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:21:36 crc kubenswrapper[4763]: I1206 08:21:36.056310 4763 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/af427750-c93a-4698-bc6b-a73202bdfeb7-util\") on node \"crc\" DevicePath \"\"" Dec 06 08:21:36 crc kubenswrapper[4763]: I1206 08:21:36.575869 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" event={"ID":"af427750-c93a-4698-bc6b-a73202bdfeb7","Type":"ContainerDied","Data":"dc551dfa04ee739002ec17781bb84f7804103fa98cefde137af3e9eec7f6a9c9"} Dec 06 08:21:36 crc kubenswrapper[4763]: I1206 08:21:36.575932 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm" Dec 06 08:21:36 crc kubenswrapper[4763]: I1206 08:21:36.575949 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc551dfa04ee739002ec17781bb84f7804103fa98cefde137af3e9eec7f6a9c9" Dec 06 08:21:42 crc kubenswrapper[4763]: I1206 08:21:42.536679 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:21:42 crc kubenswrapper[4763]: I1206 08:21:42.537039 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:21:42 crc kubenswrapper[4763]: I1206 08:21:42.537091 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:21:42 crc kubenswrapper[4763]: I1206 08:21:42.537658 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ffbb33058c1c9f7b77a07ef9a3e30aac957b2a1237a5325b0fa61d5ce390660f"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 08:21:42 crc kubenswrapper[4763]: I1206 08:21:42.537714 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://ffbb33058c1c9f7b77a07ef9a3e30aac957b2a1237a5325b0fa61d5ce390660f" gracePeriod=600 Dec 06 08:21:43 crc kubenswrapper[4763]: I1206 08:21:43.623839 4763 generic.go:334] "Generic (PLEG): container finished" 
podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="ffbb33058c1c9f7b77a07ef9a3e30aac957b2a1237a5325b0fa61d5ce390660f" exitCode=0 Dec 06 08:21:43 crc kubenswrapper[4763]: I1206 08:21:43.624134 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"ffbb33058c1c9f7b77a07ef9a3e30aac957b2a1237a5325b0fa61d5ce390660f"} Dec 06 08:21:43 crc kubenswrapper[4763]: I1206 08:21:43.624162 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"1300a82f37d1d362b5ee04ae557ec46ab85297772068f4f4d8becb8428ab897a"} Dec 06 08:21:43 crc kubenswrapper[4763]: I1206 08:21:43.624181 4763 scope.go:117] "RemoveContainer" containerID="934262ae433b5543bf19f01f5875f6e74c9fb7bfaabca2d290eb9294c73853b5" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.208661 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2"] Dec 06 08:21:48 crc kubenswrapper[4763]: E1206 08:21:48.209364 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af427750-c93a-4698-bc6b-a73202bdfeb7" containerName="util" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.209377 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="af427750-c93a-4698-bc6b-a73202bdfeb7" containerName="util" Dec 06 08:21:48 crc kubenswrapper[4763]: E1206 08:21:48.209390 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af427750-c93a-4698-bc6b-a73202bdfeb7" containerName="extract" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.209396 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="af427750-c93a-4698-bc6b-a73202bdfeb7" containerName="extract" Dec 06 08:21:48 crc kubenswrapper[4763]: E1206 08:21:48.209404 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af427750-c93a-4698-bc6b-a73202bdfeb7" containerName="pull" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.209411 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="af427750-c93a-4698-bc6b-a73202bdfeb7" containerName="pull" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.209514 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="af427750-c93a-4698-bc6b-a73202bdfeb7" containerName="extract" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.209916 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.211956 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-6xln5" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.212937 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.213519 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.223204 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2"] Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.256231 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng"] Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.257135 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.259558 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-vjcbv" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.263685 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.264565 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf"] Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.265406 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.271037 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf"] Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.277501 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng"] Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.298791 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h29jt\" (UniqueName: \"kubernetes.io/projected/e12a3a42-3cdb-490c-86f4-cf0bfbfdde37-kube-api-access-h29jt\") pod \"obo-prometheus-operator-668cf9dfbb-8vpz2\" (UID: \"e12a3a42-3cdb-490c-86f4-cf0bfbfdde37\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.400321 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fa21f3ec-f2cd-4c50-b2f6-831de68b3e61-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf\" (UID: \"fa21f3ec-f2cd-4c50-b2f6-831de68b3e61\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.400363 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fa21f3ec-f2cd-4c50-b2f6-831de68b3e61-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf\" (UID: \"fa21f3ec-f2cd-4c50-b2f6-831de68b3e61\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.400387 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/45272139-b882-4b1e-a1e9-1b570c6f74ec-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng\" (UID: \"45272139-b882-4b1e-a1e9-1b570c6f74ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.400406 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/45272139-b882-4b1e-a1e9-1b570c6f74ec-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng\" (UID: \"45272139-b882-4b1e-a1e9-1b570c6f74ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.400490 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h29jt\" (UniqueName: \"kubernetes.io/projected/e12a3a42-3cdb-490c-86f4-cf0bfbfdde37-kube-api-access-h29jt\") pod \"obo-prometheus-operator-668cf9dfbb-8vpz2\" (UID: \"e12a3a42-3cdb-490c-86f4-cf0bfbfdde37\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.433811 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-v6zq6"] Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.434487 
4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" Dec 06 08:21:48 crc kubenswrapper[4763]: W1206 08:21:48.436493 4763 reflector.go:561] object-"openshift-operators"/"observability-operator-tls": failed to list *v1.Secret: secrets "observability-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-operators": no relationship found between node 'crc' and this object Dec 06 08:21:48 crc kubenswrapper[4763]: E1206 08:21:48.436538 4763 reflector.go:158] "Unhandled Error" err="object-\"openshift-operators\"/\"observability-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"observability-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 06 08:21:48 crc kubenswrapper[4763]: W1206 08:21:48.436647 4763 reflector.go:561] object-"openshift-operators"/"observability-operator-sa-dockercfg-4pgc7": failed to list *v1.Secret: secrets "observability-operator-sa-dockercfg-4pgc7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-operators": no relationship found between node 'crc' and this object Dec 06 08:21:48 crc kubenswrapper[4763]: E1206 08:21:48.436667 4763 reflector.go:158] "Unhandled Error" err="object-\"openshift-operators\"/\"observability-operator-sa-dockercfg-4pgc7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"observability-operator-sa-dockercfg-4pgc7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.439675 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h29jt\" (UniqueName: \"kubernetes.io/projected/e12a3a42-3cdb-490c-86f4-cf0bfbfdde37-kube-api-access-h29jt\") pod \"obo-prometheus-operator-668cf9dfbb-8vpz2\" (UID: \"e12a3a42-3cdb-490c-86f4-cf0bfbfdde37\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.501233 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fa21f3ec-f2cd-4c50-b2f6-831de68b3e61-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf\" (UID: \"fa21f3ec-f2cd-4c50-b2f6-831de68b3e61\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.501287 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fa21f3ec-f2cd-4c50-b2f6-831de68b3e61-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf\" (UID: \"fa21f3ec-f2cd-4c50-b2f6-831de68b3e61\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.501325 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/45272139-b882-4b1e-a1e9-1b570c6f74ec-webhook-cert\") pod 
\"obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng\" (UID: \"45272139-b882-4b1e-a1e9-1b570c6f74ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.501347 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/45272139-b882-4b1e-a1e9-1b570c6f74ec-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng\" (UID: \"45272139-b882-4b1e-a1e9-1b570c6f74ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.506656 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/45272139-b882-4b1e-a1e9-1b570c6f74ec-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng\" (UID: \"45272139-b882-4b1e-a1e9-1b570c6f74ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.508327 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/45272139-b882-4b1e-a1e9-1b570c6f74ec-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng\" (UID: \"45272139-b882-4b1e-a1e9-1b570c6f74ec\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.510539 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fa21f3ec-f2cd-4c50-b2f6-831de68b3e61-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf\" (UID: \"fa21f3ec-f2cd-4c50-b2f6-831de68b3e61\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.510974 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fa21f3ec-f2cd-4c50-b2f6-831de68b3e61-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf\" (UID: \"fa21f3ec-f2cd-4c50-b2f6-831de68b3e61\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.521636 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-v6zq6"] Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.528127 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.578360 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.589179 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.602133 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jwkt\" (UniqueName: \"kubernetes.io/projected/7cee436d-c942-4862-80b9-ba1633c94c45-kube-api-access-4jwkt\") pod \"observability-operator-d8bb48f5d-v6zq6\" (UID: \"7cee436d-c942-4862-80b9-ba1633c94c45\") " pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.602475 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/7cee436d-c942-4862-80b9-ba1633c94c45-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-v6zq6\" (UID: \"7cee436d-c942-4862-80b9-ba1633c94c45\") " pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.693351 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-mm44k"] Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.694362 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-mm44k" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.698147 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-gjgmc" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.703523 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/7cee436d-c942-4862-80b9-ba1633c94c45-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-v6zq6\" (UID: \"7cee436d-c942-4862-80b9-ba1633c94c45\") " pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.703633 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jwkt\" (UniqueName: \"kubernetes.io/projected/7cee436d-c942-4862-80b9-ba1633c94c45-kube-api-access-4jwkt\") pod \"observability-operator-d8bb48f5d-v6zq6\" (UID: \"7cee436d-c942-4862-80b9-ba1633c94c45\") " pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.709188 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-mm44k"] Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.748847 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jwkt\" (UniqueName: \"kubernetes.io/projected/7cee436d-c942-4862-80b9-ba1633c94c45-kube-api-access-4jwkt\") pod \"observability-operator-d8bb48f5d-v6zq6\" (UID: \"7cee436d-c942-4862-80b9-ba1633c94c45\") " pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.804926 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4mwl\" (UniqueName: \"kubernetes.io/projected/fffa7ba6-3524-4812-8c4c-14d616125be7-kube-api-access-q4mwl\") pod \"perses-operator-5446b9c989-mm44k\" (UID: \"fffa7ba6-3524-4812-8c4c-14d616125be7\") " pod="openshift-operators/perses-operator-5446b9c989-mm44k" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 
08:21:48.804989 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/fffa7ba6-3524-4812-8c4c-14d616125be7-openshift-service-ca\") pod \"perses-operator-5446b9c989-mm44k\" (UID: \"fffa7ba6-3524-4812-8c4c-14d616125be7\") " pod="openshift-operators/perses-operator-5446b9c989-mm44k" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.907495 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4mwl\" (UniqueName: \"kubernetes.io/projected/fffa7ba6-3524-4812-8c4c-14d616125be7-kube-api-access-q4mwl\") pod \"perses-operator-5446b9c989-mm44k\" (UID: \"fffa7ba6-3524-4812-8c4c-14d616125be7\") " pod="openshift-operators/perses-operator-5446b9c989-mm44k" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.907547 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/fffa7ba6-3524-4812-8c4c-14d616125be7-openshift-service-ca\") pod \"perses-operator-5446b9c989-mm44k\" (UID: \"fffa7ba6-3524-4812-8c4c-14d616125be7\") " pod="openshift-operators/perses-operator-5446b9c989-mm44k" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.908418 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/fffa7ba6-3524-4812-8c4c-14d616125be7-openshift-service-ca\") pod \"perses-operator-5446b9c989-mm44k\" (UID: \"fffa7ba6-3524-4812-8c4c-14d616125be7\") " pod="openshift-operators/perses-operator-5446b9c989-mm44k" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.926877 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4mwl\" (UniqueName: \"kubernetes.io/projected/fffa7ba6-3524-4812-8c4c-14d616125be7-kube-api-access-q4mwl\") pod \"perses-operator-5446b9c989-mm44k\" (UID: \"fffa7ba6-3524-4812-8c4c-14d616125be7\") " pod="openshift-operators/perses-operator-5446b9c989-mm44k" Dec 06 08:21:48 crc kubenswrapper[4763]: I1206 08:21:48.935672 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf"] Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.057624 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-mm44k" Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.099387 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2"] Dec 06 08:21:49 crc kubenswrapper[4763]: W1206 08:21:49.107748 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode12a3a42_3cdb_490c_86f4_cf0bfbfdde37.slice/crio-409eed52db4431d18390f05e4996a270a265ba294dbe733be14e83988a4085fd WatchSource:0}: Error finding container 409eed52db4431d18390f05e4996a270a265ba294dbe733be14e83988a4085fd: Status 404 returned error can't find the container with id 409eed52db4431d18390f05e4996a270a265ba294dbe733be14e83988a4085fd Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.183804 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng"] Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.284555 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-mm44k"] Dec 06 08:21:49 crc kubenswrapper[4763]: W1206 08:21:49.296075 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfffa7ba6_3524_4812_8c4c_14d616125be7.slice/crio-3a00d7da3fae5e2456a9e52867ac0b0c8feb3b718a7c550eac49e557357c997d WatchSource:0}: Error finding container 3a00d7da3fae5e2456a9e52867ac0b0c8feb3b718a7c550eac49e557357c997d: Status 404 returned error can't find the container with id 3a00d7da3fae5e2456a9e52867ac0b0c8feb3b718a7c550eac49e557357c997d Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.365974 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.381339 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/7cee436d-c942-4862-80b9-ba1633c94c45-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-v6zq6\" (UID: \"7cee436d-c942-4862-80b9-ba1633c94c45\") " pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.658218 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" event={"ID":"45272139-b882-4b1e-a1e9-1b570c6f74ec","Type":"ContainerStarted","Data":"7267c7b95ae02ba935e3244d9394847d838b7fcc521092c58d19d032ba1c1887"} Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.659200 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2" event={"ID":"e12a3a42-3cdb-490c-86f4-cf0bfbfdde37","Type":"ContainerStarted","Data":"409eed52db4431d18390f05e4996a270a265ba294dbe733be14e83988a4085fd"} Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.660377 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-mm44k" event={"ID":"fffa7ba6-3524-4812-8c4c-14d616125be7","Type":"ContainerStarted","Data":"3a00d7da3fae5e2456a9e52867ac0b0c8feb3b718a7c550eac49e557357c997d"} Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.661830 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" event={"ID":"fa21f3ec-f2cd-4c50-b2f6-831de68b3e61","Type":"ContainerStarted","Data":"76369126f7d704dba010cfa12f4a2602dd8b2296f8e4e435ec25062dec0db2ce"} Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.741651 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-4pgc7" Dec 06 08:21:49 crc kubenswrapper[4763]: I1206 08:21:49.742686 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" Dec 06 08:21:50 crc kubenswrapper[4763]: I1206 08:21:50.026390 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-v6zq6"] Dec 06 08:21:50 crc kubenswrapper[4763]: W1206 08:21:50.039815 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7cee436d_c942_4862_80b9_ba1633c94c45.slice/crio-8f737e25e7e078971ffef752d9f655b131143aeebeab6fa713f93fc096681667 WatchSource:0}: Error finding container 8f737e25e7e078971ffef752d9f655b131143aeebeab6fa713f93fc096681667: Status 404 returned error can't find the container with id 8f737e25e7e078971ffef752d9f655b131143aeebeab6fa713f93fc096681667 Dec 06 08:21:50 crc kubenswrapper[4763]: I1206 08:21:50.682478 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" event={"ID":"7cee436d-c942-4862-80b9-ba1633c94c45","Type":"ContainerStarted","Data":"8f737e25e7e078971ffef752d9f655b131143aeebeab6fa713f93fc096681667"} Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.109061 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.109677 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h29jt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-668cf9dfbb-8vpz2_openshift-operators(e12a3a42-3cdb-490c-86f4-cf0bfbfdde37): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.110870 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2" podUID="e12a3a42-3cdb-490c-86f4-cf0bfbfdde37" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.525878 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.526094 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q4mwl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-5446b9c989-mm44k_openshift-operators(fffa7ba6-3524-4812-8c4c-14d616125be7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.527292 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-5446b9c989-mm44k" podUID="fffa7ba6-3524-4812-8c4c-14d616125be7" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.841244 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.841429 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng_openshift-operators(45272139-b882-4b1e-a1e9-1b570c6f74ec): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.842535 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" podUID="45272139-b882-4b1e-a1e9-1b570c6f74ec" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.853478 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385\\\"\"" pod="openshift-operators/perses-operator-5446b9c989-mm44k" podUID="fffa7ba6-3524-4812-8c4c-14d616125be7" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.853763 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3\\\"\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2" podUID="e12a3a42-3cdb-490c-86f4-cf0bfbfdde37" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.905347 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.905800 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf_openshift-operators(fa21f3ec-f2cd-4c50-b2f6-831de68b3e61): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:22:05 crc kubenswrapper[4763]: E1206 08:22:05.906996 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" podUID="fa21f3ec-f2cd-4c50-b2f6-831de68b3e61" Dec 06 08:22:06 crc kubenswrapper[4763]: E1206 08:22:06.849703 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" podUID="45272139-b882-4b1e-a1e9-1b570c6f74ec" Dec 06 08:22:06 crc kubenswrapper[4763]: E1206 08:22:06.849703 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" podUID="fa21f3ec-f2cd-4c50-b2f6-831de68b3e61" Dec 06 08:22:07 crc kubenswrapper[4763]: E1206 08:22:07.377409 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb" Dec 06 08:22:07 crc kubenswrapper[4763]: E1206 08:22:07.377646 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) 
--openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91
596d5356a2414a2bd103baaef9fa7838c672eb2,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4jwkt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-d8bb48f5d-v6zq6_openshift-operators(7cee436d-c942-4862-80b9-ba1633c94c45): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 06 08:22:07 crc kubenswrapper[4763]: E1206 08:22:07.378913 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" podUID="7cee436d-c942-4862-80b9-ba1633c94c45" Dec 06 08:22:07 crc kubenswrapper[4763]: E1206 08:22:07.858278 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb\\\"\"" pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" podUID="7cee436d-c942-4862-80b9-ba1633c94c45" Dec 06 08:22:19 crc kubenswrapper[4763]: I1206 08:22:19.921117 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" event={"ID":"45272139-b882-4b1e-a1e9-1b570c6f74ec","Type":"ContainerStarted","Data":"a3e792771adb8ca2805bd6796341380d648a70e82a49dada9a33fc45583c0e49"} Dec 06 08:22:19 crc kubenswrapper[4763]: I1206 08:22:19.940359 
4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng" podStartSLOduration=2.154617975 podStartE2EDuration="31.940335987s" podCreationTimestamp="2025-12-06 08:21:48 +0000 UTC" firstStartedPulling="2025-12-06 08:21:49.191758137 +0000 UTC m=+591.767463175" lastFinishedPulling="2025-12-06 08:22:18.977476159 +0000 UTC m=+621.553181187" observedRunningTime="2025-12-06 08:22:19.937216797 +0000 UTC m=+622.512921835" watchObservedRunningTime="2025-12-06 08:22:19.940335987 +0000 UTC m=+622.516041025" Dec 06 08:22:21 crc kubenswrapper[4763]: I1206 08:22:21.941378 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" event={"ID":"7cee436d-c942-4862-80b9-ba1633c94c45","Type":"ContainerStarted","Data":"ab74c1825523c125cedb2f7a216c99487b0021195256d60d7f2a536299216618"} Dec 06 08:22:21 crc kubenswrapper[4763]: I1206 08:22:21.942944 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" Dec 06 08:22:21 crc kubenswrapper[4763]: I1206 08:22:21.949433 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" event={"ID":"fa21f3ec-f2cd-4c50-b2f6-831de68b3e61","Type":"ContainerStarted","Data":"81d4cc70037e0ebe3ed133221466422d85395fed96184b82d71315830e8b6e3d"} Dec 06 08:22:21 crc kubenswrapper[4763]: I1206 08:22:21.952594 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2" event={"ID":"e12a3a42-3cdb-490c-86f4-cf0bfbfdde37","Type":"ContainerStarted","Data":"ef8c2944fdcc2bc48b9055cbbe36e8e5784bc9c3cf568193363b4e9fdc38af8a"} Dec 06 08:22:21 crc kubenswrapper[4763]: I1206 08:22:21.972973 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" podStartSLOduration=3.139472484 podStartE2EDuration="33.972956126s" podCreationTimestamp="2025-12-06 08:21:48 +0000 UTC" firstStartedPulling="2025-12-06 08:21:50.043152766 +0000 UTC m=+592.618857804" lastFinishedPulling="2025-12-06 08:22:20.876636408 +0000 UTC m=+623.452341446" observedRunningTime="2025-12-06 08:22:21.969216529 +0000 UTC m=+624.544921587" watchObservedRunningTime="2025-12-06 08:22:21.972956126 +0000 UTC m=+624.548661164" Dec 06 08:22:21 crc kubenswrapper[4763]: I1206 08:22:21.974357 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-v6zq6" Dec 06 08:22:22 crc kubenswrapper[4763]: I1206 08:22:22.004051 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf" podStartSLOduration=-9223372002.850748 podStartE2EDuration="34.004028661s" podCreationTimestamp="2025-12-06 08:21:48 +0000 UTC" firstStartedPulling="2025-12-06 08:21:48.951803217 +0000 UTC m=+591.527508265" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:22:22.000357207 +0000 UTC m=+624.576062255" watchObservedRunningTime="2025-12-06 08:22:22.004028661 +0000 UTC m=+624.579733699" Dec 06 08:22:22 crc kubenswrapper[4763]: I1206 08:22:22.019133 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-8vpz2" podStartSLOduration=1.5750270419999999 
podStartE2EDuration="34.019118673s" podCreationTimestamp="2025-12-06 08:21:48 +0000 UTC" firstStartedPulling="2025-12-06 08:21:49.110958423 +0000 UTC m=+591.686663461" lastFinishedPulling="2025-12-06 08:22:21.555050054 +0000 UTC m=+624.130755092" observedRunningTime="2025-12-06 08:22:22.018878607 +0000 UTC m=+624.594583645" watchObservedRunningTime="2025-12-06 08:22:22.019118673 +0000 UTC m=+624.594823711" Dec 06 08:22:22 crc kubenswrapper[4763]: I1206 08:22:22.959174 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-mm44k" event={"ID":"fffa7ba6-3524-4812-8c4c-14d616125be7","Type":"ContainerStarted","Data":"ea388cb05b91790c8837ff701a61a45973ee57a8e133ae1d6d8fdd646f01403a"} Dec 06 08:22:22 crc kubenswrapper[4763]: I1206 08:22:22.959495 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-mm44k" Dec 06 08:22:22 crc kubenswrapper[4763]: I1206 08:22:22.979227 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-mm44k" podStartSLOduration=2.380669216 podStartE2EDuration="34.97921287s" podCreationTimestamp="2025-12-06 08:21:48 +0000 UTC" firstStartedPulling="2025-12-06 08:21:49.297614711 +0000 UTC m=+591.873319749" lastFinishedPulling="2025-12-06 08:22:21.896158365 +0000 UTC m=+624.471863403" observedRunningTime="2025-12-06 08:22:22.976466869 +0000 UTC m=+625.552171907" watchObservedRunningTime="2025-12-06 08:22:22.97921287 +0000 UTC m=+625.554917908" Dec 06 08:22:29 crc kubenswrapper[4763]: I1206 08:22:29.060408 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-mm44k" Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.756557 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8"] Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.758353 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.760990 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.766226 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8"] Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.793975 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42f4x\" (UniqueName: \"kubernetes.io/projected/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-kube-api-access-42f4x\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.794108 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.794557 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.895345 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42f4x\" (UniqueName: \"kubernetes.io/projected/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-kube-api-access-42f4x\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.895417 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.895438 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.895983 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.896064 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:48 crc kubenswrapper[4763]: I1206 08:22:48.914818 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42f4x\" (UniqueName: \"kubernetes.io/projected/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-kube-api-access-42f4x\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:49 crc kubenswrapper[4763]: I1206 08:22:49.074805 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:49 crc kubenswrapper[4763]: I1206 08:22:49.608838 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8"] Dec 06 08:22:50 crc kubenswrapper[4763]: I1206 08:22:50.096870 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" event={"ID":"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0","Type":"ContainerStarted","Data":"a82a94af40374d6bab9346f52ad3d5146617c0f4312f953774dbeaf2b6530477"} Dec 06 08:22:50 crc kubenswrapper[4763]: I1206 08:22:50.097165 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" event={"ID":"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0","Type":"ContainerStarted","Data":"6375fac2fe0139ad188778643609f5a0bada0286bd6bdba0567a5a34960978fd"} Dec 06 08:22:50 crc kubenswrapper[4763]: I1206 08:22:50.880395 4763 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.111176 4763 generic.go:334] "Generic (PLEG): container finished" podID="4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" containerID="a82a94af40374d6bab9346f52ad3d5146617c0f4312f953774dbeaf2b6530477" exitCode=0 Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.111215 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" event={"ID":"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0","Type":"ContainerDied","Data":"a82a94af40374d6bab9346f52ad3d5146617c0f4312f953774dbeaf2b6530477"} Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.130608 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6czsc"] Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.131854 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.138787 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6czsc"] Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.257136 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-utilities\") pod \"redhat-operators-6czsc\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.257186 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-catalog-content\") pod \"redhat-operators-6czsc\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.257249 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9nww\" (UniqueName: \"kubernetes.io/projected/35323f07-820a-4f29-9646-c7b859f6b8b7-kube-api-access-t9nww\") pod \"redhat-operators-6czsc\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.358180 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9nww\" (UniqueName: \"kubernetes.io/projected/35323f07-820a-4f29-9646-c7b859f6b8b7-kube-api-access-t9nww\") pod \"redhat-operators-6czsc\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.358289 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-utilities\") pod \"redhat-operators-6czsc\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.358318 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-catalog-content\") pod \"redhat-operators-6czsc\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.358763 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-catalog-content\") pod \"redhat-operators-6czsc\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.359234 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-utilities\") pod \"redhat-operators-6czsc\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.378928 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-t9nww\" (UniqueName: \"kubernetes.io/projected/35323f07-820a-4f29-9646-c7b859f6b8b7-kube-api-access-t9nww\") pod \"redhat-operators-6czsc\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.460880 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:22:51 crc kubenswrapper[4763]: I1206 08:22:51.809216 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6czsc"] Dec 06 08:22:52 crc kubenswrapper[4763]: I1206 08:22:52.120041 4763 generic.go:334] "Generic (PLEG): container finished" podID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerID="c0e66a5d0b3ec6089825f434d8145482b5b8c235a74d8ce0969deecae13e0f21" exitCode=0 Dec 06 08:22:52 crc kubenswrapper[4763]: I1206 08:22:52.120101 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czsc" event={"ID":"35323f07-820a-4f29-9646-c7b859f6b8b7","Type":"ContainerDied","Data":"c0e66a5d0b3ec6089825f434d8145482b5b8c235a74d8ce0969deecae13e0f21"} Dec 06 08:22:52 crc kubenswrapper[4763]: I1206 08:22:52.120341 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czsc" event={"ID":"35323f07-820a-4f29-9646-c7b859f6b8b7","Type":"ContainerStarted","Data":"364bc916d15bb4d6e69370bf8961bd9eb4de67abe25a7047c0daa740324c440b"} Dec 06 08:22:53 crc kubenswrapper[4763]: I1206 08:22:53.143494 4763 generic.go:334] "Generic (PLEG): container finished" podID="4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" containerID="4df37f77b78ef9f66d9dde07a4f3b667d1f1ab0bc8dba5c5f90a91089f12b21b" exitCode=0 Dec 06 08:22:53 crc kubenswrapper[4763]: I1206 08:22:53.143570 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" event={"ID":"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0","Type":"ContainerDied","Data":"4df37f77b78ef9f66d9dde07a4f3b667d1f1ab0bc8dba5c5f90a91089f12b21b"} Dec 06 08:22:53 crc kubenswrapper[4763]: I1206 08:22:53.147351 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czsc" event={"ID":"35323f07-820a-4f29-9646-c7b859f6b8b7","Type":"ContainerStarted","Data":"1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c"} Dec 06 08:22:54 crc kubenswrapper[4763]: I1206 08:22:54.153584 4763 generic.go:334] "Generic (PLEG): container finished" podID="4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" containerID="de01be12e28d342f7e2016e899460bc5fd111a8c602ad9b679bfc7bcae85125f" exitCode=0 Dec 06 08:22:54 crc kubenswrapper[4763]: I1206 08:22:54.154110 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" event={"ID":"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0","Type":"ContainerDied","Data":"de01be12e28d342f7e2016e899460bc5fd111a8c602ad9b679bfc7bcae85125f"} Dec 06 08:22:55 crc kubenswrapper[4763]: I1206 08:22:55.632766 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:55 crc kubenswrapper[4763]: I1206 08:22:55.784053 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42f4x\" (UniqueName: \"kubernetes.io/projected/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-kube-api-access-42f4x\") pod \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " Dec 06 08:22:55 crc kubenswrapper[4763]: I1206 08:22:55.784154 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-util\") pod \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " Dec 06 08:22:55 crc kubenswrapper[4763]: I1206 08:22:55.784280 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-bundle\") pod \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\" (UID: \"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0\") " Dec 06 08:22:55 crc kubenswrapper[4763]: I1206 08:22:55.785020 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-bundle" (OuterVolumeSpecName: "bundle") pod "4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" (UID: "4c96b09c-5d00-4c2d-bcfb-dee1f80943c0"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:22:55 crc kubenswrapper[4763]: I1206 08:22:55.792310 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-kube-api-access-42f4x" (OuterVolumeSpecName: "kube-api-access-42f4x") pod "4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" (UID: "4c96b09c-5d00-4c2d-bcfb-dee1f80943c0"). InnerVolumeSpecName "kube-api-access-42f4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:22:55 crc kubenswrapper[4763]: I1206 08:22:55.798443 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-util" (OuterVolumeSpecName: "util") pod "4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" (UID: "4c96b09c-5d00-4c2d-bcfb-dee1f80943c0"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:22:55 crc kubenswrapper[4763]: I1206 08:22:55.886945 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42f4x\" (UniqueName: \"kubernetes.io/projected/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-kube-api-access-42f4x\") on node \"crc\" DevicePath \"\"" Dec 06 08:22:55 crc kubenswrapper[4763]: I1206 08:22:55.887046 4763 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-util\") on node \"crc\" DevicePath \"\"" Dec 06 08:22:55 crc kubenswrapper[4763]: I1206 08:22:55.887105 4763 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4c96b09c-5d00-4c2d-bcfb-dee1f80943c0-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:22:56 crc kubenswrapper[4763]: I1206 08:22:56.168966 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" Dec 06 08:22:56 crc kubenswrapper[4763]: I1206 08:22:56.169004 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8" event={"ID":"4c96b09c-5d00-4c2d-bcfb-dee1f80943c0","Type":"ContainerDied","Data":"6375fac2fe0139ad188778643609f5a0bada0286bd6bdba0567a5a34960978fd"} Dec 06 08:22:56 crc kubenswrapper[4763]: I1206 08:22:56.169050 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6375fac2fe0139ad188778643609f5a0bada0286bd6bdba0567a5a34960978fd" Dec 06 08:22:56 crc kubenswrapper[4763]: I1206 08:22:56.173936 4763 generic.go:334] "Generic (PLEG): container finished" podID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerID="1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c" exitCode=0 Dec 06 08:22:56 crc kubenswrapper[4763]: I1206 08:22:56.173980 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czsc" event={"ID":"35323f07-820a-4f29-9646-c7b859f6b8b7","Type":"ContainerDied","Data":"1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c"} Dec 06 08:22:56 crc kubenswrapper[4763]: E1206 08:22:56.289751 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c96b09c_5d00_4c2d_bcfb_dee1f80943c0.slice/crio-6375fac2fe0139ad188778643609f5a0bada0286bd6bdba0567a5a34960978fd\": RecentStats: unable to find data in memory cache]" Dec 06 08:22:57 crc kubenswrapper[4763]: I1206 08:22:57.194008 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czsc" event={"ID":"35323f07-820a-4f29-9646-c7b859f6b8b7","Type":"ContainerStarted","Data":"d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357"} Dec 06 08:22:57 crc kubenswrapper[4763]: I1206 08:22:57.219672 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6czsc" podStartSLOduration=1.746959991 podStartE2EDuration="6.219650984s" podCreationTimestamp="2025-12-06 08:22:51 +0000 UTC" firstStartedPulling="2025-12-06 08:22:52.12193329 +0000 UTC m=+654.697638328" lastFinishedPulling="2025-12-06 08:22:56.594624273 +0000 UTC m=+659.170329321" observedRunningTime="2025-12-06 08:22:57.215342402 +0000 UTC m=+659.791047440" watchObservedRunningTime="2025-12-06 08:22:57.219650984 +0000 UTC m=+659.795356022" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.050967 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-krqlh"] Dec 06 08:22:58 crc kubenswrapper[4763]: E1206 08:22:58.051715 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" containerName="extract" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.051735 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" containerName="extract" Dec 06 08:22:58 crc kubenswrapper[4763]: E1206 08:22:58.051754 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" containerName="pull" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.051763 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" containerName="pull" 
Dec 06 08:22:58 crc kubenswrapper[4763]: E1206 08:22:58.051776 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" containerName="util" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.051784 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" containerName="util" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.051949 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c96b09c-5d00-4c2d-bcfb-dee1f80943c0" containerName="extract" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.052432 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-krqlh" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.054555 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.058009 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-krqlh"] Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.058205 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-jmklc" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.061642 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.113486 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-685g9\" (UniqueName: \"kubernetes.io/projected/49419955-d174-44cb-ac59-84037352f94f-kube-api-access-685g9\") pod \"nmstate-operator-5b5b58f5c8-krqlh\" (UID: \"49419955-d174-44cb-ac59-84037352f94f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-krqlh" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.214765 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-685g9\" (UniqueName: \"kubernetes.io/projected/49419955-d174-44cb-ac59-84037352f94f-kube-api-access-685g9\") pod \"nmstate-operator-5b5b58f5c8-krqlh\" (UID: \"49419955-d174-44cb-ac59-84037352f94f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-krqlh" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.239595 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-685g9\" (UniqueName: \"kubernetes.io/projected/49419955-d174-44cb-ac59-84037352f94f-kube-api-access-685g9\") pod \"nmstate-operator-5b5b58f5c8-krqlh\" (UID: \"49419955-d174-44cb-ac59-84037352f94f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-krqlh" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.366140 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-krqlh" Dec 06 08:22:58 crc kubenswrapper[4763]: I1206 08:22:58.575627 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-krqlh"] Dec 06 08:22:58 crc kubenswrapper[4763]: W1206 08:22:58.584099 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49419955_d174_44cb_ac59_84037352f94f.slice/crio-d5f15290ff3cfc7565a8808c14c0e2a1f64a76b2afd8e361a629e791d46a0fc8 WatchSource:0}: Error finding container d5f15290ff3cfc7565a8808c14c0e2a1f64a76b2afd8e361a629e791d46a0fc8: Status 404 returned error can't find the container with id d5f15290ff3cfc7565a8808c14c0e2a1f64a76b2afd8e361a629e791d46a0fc8 Dec 06 08:22:59 crc kubenswrapper[4763]: I1206 08:22:59.204626 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-krqlh" event={"ID":"49419955-d174-44cb-ac59-84037352f94f","Type":"ContainerStarted","Data":"d5f15290ff3cfc7565a8808c14c0e2a1f64a76b2afd8e361a629e791d46a0fc8"} Dec 06 08:23:01 crc kubenswrapper[4763]: I1206 08:23:01.461456 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:23:01 crc kubenswrapper[4763]: I1206 08:23:01.461793 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:23:02 crc kubenswrapper[4763]: I1206 08:23:02.221771 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-krqlh" event={"ID":"49419955-d174-44cb-ac59-84037352f94f","Type":"ContainerStarted","Data":"d47c4a6a82a2e21b1b1119cb62da82c39a4d0bb0cde507118a27847c88095e36"} Dec 06 08:23:02 crc kubenswrapper[4763]: I1206 08:23:02.245262 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-krqlh" podStartSLOduration=1.178429061 podStartE2EDuration="4.245241408s" podCreationTimestamp="2025-12-06 08:22:58 +0000 UTC" firstStartedPulling="2025-12-06 08:22:58.586352264 +0000 UTC m=+661.162057302" lastFinishedPulling="2025-12-06 08:23:01.653164611 +0000 UTC m=+664.228869649" observedRunningTime="2025-12-06 08:23:02.23997498 +0000 UTC m=+664.815680028" watchObservedRunningTime="2025-12-06 08:23:02.245241408 +0000 UTC m=+664.820946446" Dec 06 08:23:02 crc kubenswrapper[4763]: I1206 08:23:02.503946 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6czsc" podUID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerName="registry-server" probeResult="failure" output=< Dec 06 08:23:02 crc kubenswrapper[4763]: timeout: failed to connect service ":50051" within 1s Dec 06 08:23:02 crc kubenswrapper[4763]: > Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.334422 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx"] Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.335534 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.338209 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-dx5t5" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.349443 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw"] Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.350195 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.351804 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.362675 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx"] Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.370661 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw"] Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.395514 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vm7q\" (UniqueName: \"kubernetes.io/projected/0f64e800-57db-4061-807d-90160767d69e-kube-api-access-9vm7q\") pod \"nmstate-metrics-7f946cbc9-8p9dx\" (UID: \"0f64e800-57db-4061-807d-90160767d69e\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.395590 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4a2e50c2-eda2-4acc-b454-7b07d430954a-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-pqqmw\" (UID: \"4a2e50c2-eda2-4acc-b454-7b07d430954a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.395674 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9bqz\" (UniqueName: \"kubernetes.io/projected/4a2e50c2-eda2-4acc-b454-7b07d430954a-kube-api-access-z9bqz\") pod \"nmstate-webhook-5f6d4c5ccb-pqqmw\" (UID: \"4a2e50c2-eda2-4acc-b454-7b07d430954a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.410229 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-swq67"] Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.411186 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.496451 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4a2e50c2-eda2-4acc-b454-7b07d430954a-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-pqqmw\" (UID: \"4a2e50c2-eda2-4acc-b454-7b07d430954a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.496522 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-ovs-socket\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.496562 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9bqz\" (UniqueName: \"kubernetes.io/projected/4a2e50c2-eda2-4acc-b454-7b07d430954a-kube-api-access-z9bqz\") pod \"nmstate-webhook-5f6d4c5ccb-pqqmw\" (UID: \"4a2e50c2-eda2-4acc-b454-7b07d430954a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.496582 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjd5d\" (UniqueName: \"kubernetes.io/projected/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-kube-api-access-zjd5d\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.496600 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vm7q\" (UniqueName: \"kubernetes.io/projected/0f64e800-57db-4061-807d-90160767d69e-kube-api-access-9vm7q\") pod \"nmstate-metrics-7f946cbc9-8p9dx\" (UID: \"0f64e800-57db-4061-807d-90160767d69e\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.496627 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-nmstate-lock\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.496653 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-dbus-socket\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: E1206 08:23:03.496775 4763 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 06 08:23:03 crc kubenswrapper[4763]: E1206 08:23:03.496821 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a2e50c2-eda2-4acc-b454-7b07d430954a-tls-key-pair podName:4a2e50c2-eda2-4acc-b454-7b07d430954a nodeName:}" failed. No retries permitted until 2025-12-06 08:23:03.99680609 +0000 UTC m=+666.572511128 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/4a2e50c2-eda2-4acc-b454-7b07d430954a-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-pqqmw" (UID: "4a2e50c2-eda2-4acc-b454-7b07d430954a") : secret "openshift-nmstate-webhook" not found Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.532494 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vm7q\" (UniqueName: \"kubernetes.io/projected/0f64e800-57db-4061-807d-90160767d69e-kube-api-access-9vm7q\") pod \"nmstate-metrics-7f946cbc9-8p9dx\" (UID: \"0f64e800-57db-4061-807d-90160767d69e\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.543116 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9bqz\" (UniqueName: \"kubernetes.io/projected/4a2e50c2-eda2-4acc-b454-7b07d430954a-kube-api-access-z9bqz\") pod \"nmstate-webhook-5f6d4c5ccb-pqqmw\" (UID: \"4a2e50c2-eda2-4acc-b454-7b07d430954a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.549002 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57"] Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.549802 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.553353 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-m7kz8" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.553869 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.554057 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.565837 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57"] Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.601675 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjd5d\" (UniqueName: \"kubernetes.io/projected/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-kube-api-access-zjd5d\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.601728 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-nmstate-lock\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.601787 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-dbus-socket\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.601817 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhqc7\" (UniqueName: 
\"kubernetes.io/projected/f8a1df64-4620-4fb1-904a-487fed4df908-kube-api-access-fhqc7\") pod \"nmstate-console-plugin-7fbb5f6569-pxm57\" (UID: \"f8a1df64-4620-4fb1-904a-487fed4df908\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.601858 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8a1df64-4620-4fb1-904a-487fed4df908-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-pxm57\" (UID: \"f8a1df64-4620-4fb1-904a-487fed4df908\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.601887 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f8a1df64-4620-4fb1-904a-487fed4df908-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-pxm57\" (UID: \"f8a1df64-4620-4fb1-904a-487fed4df908\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.601926 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-ovs-socket\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.602005 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-ovs-socket\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.602175 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-nmstate-lock\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.602269 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-dbus-socket\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.620570 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjd5d\" (UniqueName: \"kubernetes.io/projected/6dc27b5f-e3e1-4aab-842f-db79e092bf9a-kube-api-access-zjd5d\") pod \"nmstate-handler-swq67\" (UID: \"6dc27b5f-e3e1-4aab-842f-db79e092bf9a\") " pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.650309 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.702872 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhqc7\" (UniqueName: \"kubernetes.io/projected/f8a1df64-4620-4fb1-904a-487fed4df908-kube-api-access-fhqc7\") pod \"nmstate-console-plugin-7fbb5f6569-pxm57\" (UID: \"f8a1df64-4620-4fb1-904a-487fed4df908\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.702952 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8a1df64-4620-4fb1-904a-487fed4df908-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-pxm57\" (UID: \"f8a1df64-4620-4fb1-904a-487fed4df908\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.702982 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f8a1df64-4620-4fb1-904a-487fed4df908-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-pxm57\" (UID: \"f8a1df64-4620-4fb1-904a-487fed4df908\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.703940 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f8a1df64-4620-4fb1-904a-487fed4df908-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-pxm57\" (UID: \"f8a1df64-4620-4fb1-904a-487fed4df908\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.707155 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8a1df64-4620-4fb1-904a-487fed4df908-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-pxm57\" (UID: \"f8a1df64-4620-4fb1-904a-487fed4df908\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.722744 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhqc7\" (UniqueName: \"kubernetes.io/projected/f8a1df64-4620-4fb1-904a-487fed4df908-kube-api-access-fhqc7\") pod \"nmstate-console-plugin-7fbb5f6569-pxm57\" (UID: \"f8a1df64-4620-4fb1-904a-487fed4df908\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.737475 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.753869 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-565b65f7b-bzxjp"] Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.755204 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.800685 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-565b65f7b-bzxjp"] Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.804192 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-service-ca\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.804378 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hgnp\" (UniqueName: \"kubernetes.io/projected/2b6e192d-3f58-4809-a89e-1567977cd292-kube-api-access-8hgnp\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.804411 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-oauth-serving-cert\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.804431 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b6e192d-3f58-4809-a89e-1567977cd292-console-serving-cert\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.804467 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-trusted-ca-bundle\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.804586 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-console-config\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.804637 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2b6e192d-3f58-4809-a89e-1567977cd292-console-oauth-config\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.875467 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.906468 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-console-config\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.906555 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2b6e192d-3f58-4809-a89e-1567977cd292-console-oauth-config\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.906619 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-service-ca\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.906668 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hgnp\" (UniqueName: \"kubernetes.io/projected/2b6e192d-3f58-4809-a89e-1567977cd292-kube-api-access-8hgnp\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.906693 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-oauth-serving-cert\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.906718 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b6e192d-3f58-4809-a89e-1567977cd292-console-serving-cert\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.906754 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-trusted-ca-bundle\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.907408 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-console-config\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.908254 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-oauth-serving-cert\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " 
pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.908323 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-trusted-ca-bundle\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.908803 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2b6e192d-3f58-4809-a89e-1567977cd292-service-ca\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.911524 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx"] Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.913631 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2b6e192d-3f58-4809-a89e-1567977cd292-console-oauth-config\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.913677 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b6e192d-3f58-4809-a89e-1567977cd292-console-serving-cert\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:03 crc kubenswrapper[4763]: W1206 08:23:03.927023 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f64e800_57db_4061_807d_90160767d69e.slice/crio-1f6cdbe753d0a8892b016d19e21401596e4471846300799968221c553a4ee1b3 WatchSource:0}: Error finding container 1f6cdbe753d0a8892b016d19e21401596e4471846300799968221c553a4ee1b3: Status 404 returned error can't find the container with id 1f6cdbe753d0a8892b016d19e21401596e4471846300799968221c553a4ee1b3 Dec 06 08:23:03 crc kubenswrapper[4763]: I1206 08:23:03.930103 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hgnp\" (UniqueName: \"kubernetes.io/projected/2b6e192d-3f58-4809-a89e-1567977cd292-kube-api-access-8hgnp\") pod \"console-565b65f7b-bzxjp\" (UID: \"2b6e192d-3f58-4809-a89e-1567977cd292\") " pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:04 crc kubenswrapper[4763]: I1206 08:23:04.008288 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4a2e50c2-eda2-4acc-b454-7b07d430954a-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-pqqmw\" (UID: \"4a2e50c2-eda2-4acc-b454-7b07d430954a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" Dec 06 08:23:04 crc kubenswrapper[4763]: I1206 08:23:04.013139 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4a2e50c2-eda2-4acc-b454-7b07d430954a-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-pqqmw\" (UID: \"4a2e50c2-eda2-4acc-b454-7b07d430954a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" Dec 06 08:23:04 crc kubenswrapper[4763]: I1206 08:23:04.073419 4763 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57"] Dec 06 08:23:04 crc kubenswrapper[4763]: W1206 08:23:04.078083 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8a1df64_4620_4fb1_904a_487fed4df908.slice/crio-12b36d37a2fc5800bef4ff2c32a37fc23678165bd8e83d65cea27b70b31aa363 WatchSource:0}: Error finding container 12b36d37a2fc5800bef4ff2c32a37fc23678165bd8e83d65cea27b70b31aa363: Status 404 returned error can't find the container with id 12b36d37a2fc5800bef4ff2c32a37fc23678165bd8e83d65cea27b70b31aa363 Dec 06 08:23:04 crc kubenswrapper[4763]: I1206 08:23:04.090736 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:04 crc kubenswrapper[4763]: I1206 08:23:04.233501 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-swq67" event={"ID":"6dc27b5f-e3e1-4aab-842f-db79e092bf9a","Type":"ContainerStarted","Data":"8be31dbc5107a19539e7fc4b8a16e2aa8d41ba396bae10968d3dfb6e2e62791f"} Dec 06 08:23:04 crc kubenswrapper[4763]: I1206 08:23:04.234970 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" event={"ID":"f8a1df64-4620-4fb1-904a-487fed4df908","Type":"ContainerStarted","Data":"12b36d37a2fc5800bef4ff2c32a37fc23678165bd8e83d65cea27b70b31aa363"} Dec 06 08:23:04 crc kubenswrapper[4763]: I1206 08:23:04.236076 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx" event={"ID":"0f64e800-57db-4061-807d-90160767d69e","Type":"ContainerStarted","Data":"1f6cdbe753d0a8892b016d19e21401596e4471846300799968221c553a4ee1b3"} Dec 06 08:23:04 crc kubenswrapper[4763]: I1206 08:23:04.263229 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" Dec 06 08:23:04 crc kubenswrapper[4763]: I1206 08:23:04.272189 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-565b65f7b-bzxjp"] Dec 06 08:23:04 crc kubenswrapper[4763]: W1206 08:23:04.279767 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b6e192d_3f58_4809_a89e_1567977cd292.slice/crio-a4a461a32a9e639d1e9cd3b29f136ace9ec61c5dca4db4688a8d1f440f849eab WatchSource:0}: Error finding container a4a461a32a9e639d1e9cd3b29f136ace9ec61c5dca4db4688a8d1f440f849eab: Status 404 returned error can't find the container with id a4a461a32a9e639d1e9cd3b29f136ace9ec61c5dca4db4688a8d1f440f849eab Dec 06 08:23:04 crc kubenswrapper[4763]: I1206 08:23:04.433355 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw"] Dec 06 08:23:05 crc kubenswrapper[4763]: I1206 08:23:05.243818 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-565b65f7b-bzxjp" event={"ID":"2b6e192d-3f58-4809-a89e-1567977cd292","Type":"ContainerStarted","Data":"a4a461a32a9e639d1e9cd3b29f136ace9ec61c5dca4db4688a8d1f440f849eab"} Dec 06 08:23:05 crc kubenswrapper[4763]: I1206 08:23:05.244772 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" event={"ID":"4a2e50c2-eda2-4acc-b454-7b07d430954a","Type":"ContainerStarted","Data":"65b43c69f8d844d670f96cc9a5d34de1638965fd99fd7316105015e5fb44136d"} Dec 06 08:23:06 crc kubenswrapper[4763]: I1206 08:23:06.251270 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-565b65f7b-bzxjp" event={"ID":"2b6e192d-3f58-4809-a89e-1567977cd292","Type":"ContainerStarted","Data":"d292629e52fcc2db58397417cf40fc24bdf10ed303611454f1d623cc2724f63e"} Dec 06 08:23:07 crc kubenswrapper[4763]: I1206 08:23:07.272289 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-565b65f7b-bzxjp" podStartSLOduration=4.27227362 podStartE2EDuration="4.27227362s" podCreationTimestamp="2025-12-06 08:23:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:23:07.270812071 +0000 UTC m=+669.846517109" watchObservedRunningTime="2025-12-06 08:23:07.27227362 +0000 UTC m=+669.847978648" Dec 06 08:23:09 crc kubenswrapper[4763]: I1206 08:23:09.270238 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-swq67" event={"ID":"6dc27b5f-e3e1-4aab-842f-db79e092bf9a","Type":"ContainerStarted","Data":"879012a693927e5301c0c08353425d6a73f7093e22a69c4d6e3221eff0a2130d"} Dec 06 08:23:09 crc kubenswrapper[4763]: I1206 08:23:09.270923 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:09 crc kubenswrapper[4763]: I1206 08:23:09.271937 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" event={"ID":"f8a1df64-4620-4fb1-904a-487fed4df908","Type":"ContainerStarted","Data":"d72725e8997cbf19d1478305ec8c6e3b4af84a4b3cf41d4e04c6f34b5c2cdaab"} Dec 06 08:23:09 crc kubenswrapper[4763]: I1206 08:23:09.273180 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx" 
event={"ID":"0f64e800-57db-4061-807d-90160767d69e","Type":"ContainerStarted","Data":"693fa54b83010b776ce5a7f7299d5bb186cea17e7a79b2456afbb9bb2fd2521a"} Dec 06 08:23:09 crc kubenswrapper[4763]: I1206 08:23:09.274972 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" event={"ID":"4a2e50c2-eda2-4acc-b454-7b07d430954a","Type":"ContainerStarted","Data":"d3f42eb0dd3856f03ac710669808a03248fe95659e1d2edf0ee29ddcd916bce8"} Dec 06 08:23:09 crc kubenswrapper[4763]: I1206 08:23:09.275490 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" Dec 06 08:23:09 crc kubenswrapper[4763]: I1206 08:23:09.289999 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-swq67" podStartSLOduration=1.790456855 podStartE2EDuration="6.289981821s" podCreationTimestamp="2025-12-06 08:23:03 +0000 UTC" firstStartedPulling="2025-12-06 08:23:03.809968968 +0000 UTC m=+666.385674006" lastFinishedPulling="2025-12-06 08:23:08.309493914 +0000 UTC m=+670.885198972" observedRunningTime="2025-12-06 08:23:09.287180428 +0000 UTC m=+671.862885466" watchObservedRunningTime="2025-12-06 08:23:09.289981821 +0000 UTC m=+671.865686859" Dec 06 08:23:09 crc kubenswrapper[4763]: I1206 08:23:09.309995 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" podStartSLOduration=2.494740023 podStartE2EDuration="6.309978455s" podCreationTimestamp="2025-12-06 08:23:03 +0000 UTC" firstStartedPulling="2025-12-06 08:23:04.443330817 +0000 UTC m=+667.019035855" lastFinishedPulling="2025-12-06 08:23:08.258569249 +0000 UTC m=+670.834274287" observedRunningTime="2025-12-06 08:23:09.305034345 +0000 UTC m=+671.880739383" watchObservedRunningTime="2025-12-06 08:23:09.309978455 +0000 UTC m=+671.885683493" Dec 06 08:23:09 crc kubenswrapper[4763]: I1206 08:23:09.361271 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-pxm57" podStartSLOduration=2.182816567 podStartE2EDuration="6.361255978s" podCreationTimestamp="2025-12-06 08:23:03 +0000 UTC" firstStartedPulling="2025-12-06 08:23:04.080397185 +0000 UTC m=+666.656102223" lastFinishedPulling="2025-12-06 08:23:08.258836596 +0000 UTC m=+670.834541634" observedRunningTime="2025-12-06 08:23:09.360389976 +0000 UTC m=+671.936095024" watchObservedRunningTime="2025-12-06 08:23:09.361255978 +0000 UTC m=+671.936961016" Dec 06 08:23:11 crc kubenswrapper[4763]: I1206 08:23:11.289925 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx" event={"ID":"0f64e800-57db-4061-807d-90160767d69e","Type":"ContainerStarted","Data":"87a3a43922a5422089db47f7cb8eded595667312da1e9eb67785ca5bf4275100"} Dec 06 08:23:11 crc kubenswrapper[4763]: I1206 08:23:11.308618 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8p9dx" podStartSLOduration=1.887805196 podStartE2EDuration="8.308601905s" podCreationTimestamp="2025-12-06 08:23:03 +0000 UTC" firstStartedPulling="2025-12-06 08:23:03.928830043 +0000 UTC m=+666.504535081" lastFinishedPulling="2025-12-06 08:23:10.349626752 +0000 UTC m=+672.925331790" observedRunningTime="2025-12-06 08:23:11.304100887 +0000 UTC m=+673.879805925" watchObservedRunningTime="2025-12-06 08:23:11.308601905 +0000 UTC m=+673.884306943" Dec 06 08:23:11 crc 
kubenswrapper[4763]: I1206 08:23:11.507684 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:23:11 crc kubenswrapper[4763]: I1206 08:23:11.550223 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:23:11 crc kubenswrapper[4763]: I1206 08:23:11.736059 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6czsc"] Dec 06 08:23:13 crc kubenswrapper[4763]: I1206 08:23:13.302462 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6czsc" podUID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerName="registry-server" containerID="cri-o://d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357" gracePeriod=2 Dec 06 08:23:13 crc kubenswrapper[4763]: I1206 08:23:13.776309 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-swq67" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.092163 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.092246 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.098143 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.194421 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.253672 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-utilities\") pod \"35323f07-820a-4f29-9646-c7b859f6b8b7\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.253758 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-catalog-content\") pod \"35323f07-820a-4f29-9646-c7b859f6b8b7\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.253808 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9nww\" (UniqueName: \"kubernetes.io/projected/35323f07-820a-4f29-9646-c7b859f6b8b7-kube-api-access-t9nww\") pod \"35323f07-820a-4f29-9646-c7b859f6b8b7\" (UID: \"35323f07-820a-4f29-9646-c7b859f6b8b7\") " Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.255299 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-utilities" (OuterVolumeSpecName: "utilities") pod "35323f07-820a-4f29-9646-c7b859f6b8b7" (UID: "35323f07-820a-4f29-9646-c7b859f6b8b7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.263287 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35323f07-820a-4f29-9646-c7b859f6b8b7-kube-api-access-t9nww" (OuterVolumeSpecName: "kube-api-access-t9nww") pod "35323f07-820a-4f29-9646-c7b859f6b8b7" (UID: "35323f07-820a-4f29-9646-c7b859f6b8b7"). InnerVolumeSpecName "kube-api-access-t9nww". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.310472 4763 generic.go:334] "Generic (PLEG): container finished" podID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerID="d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357" exitCode=0 Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.310541 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6czsc" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.310590 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czsc" event={"ID":"35323f07-820a-4f29-9646-c7b859f6b8b7","Type":"ContainerDied","Data":"d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357"} Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.310688 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czsc" event={"ID":"35323f07-820a-4f29-9646-c7b859f6b8b7","Type":"ContainerDied","Data":"364bc916d15bb4d6e69370bf8961bd9eb4de67abe25a7047c0daa740324c440b"} Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.310709 4763 scope.go:117] "RemoveContainer" containerID="d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.316518 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-565b65f7b-bzxjp" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.330781 4763 scope.go:117] "RemoveContainer" containerID="1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.359992 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.360027 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9nww\" (UniqueName: \"kubernetes.io/projected/35323f07-820a-4f29-9646-c7b859f6b8b7-kube-api-access-t9nww\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.371193 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-7j4g2"] Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.382768 4763 scope.go:117] "RemoveContainer" containerID="c0e66a5d0b3ec6089825f434d8145482b5b8c235a74d8ce0969deecae13e0f21" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.391884 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "35323f07-820a-4f29-9646-c7b859f6b8b7" (UID: "35323f07-820a-4f29-9646-c7b859f6b8b7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.408290 4763 scope.go:117] "RemoveContainer" containerID="d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357" Dec 06 08:23:14 crc kubenswrapper[4763]: E1206 08:23:14.418368 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357\": container with ID starting with d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357 not found: ID does not exist" containerID="d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.418413 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357"} err="failed to get container status \"d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357\": rpc error: code = NotFound desc = could not find container \"d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357\": container with ID starting with d5cbb81dc15930fed2555390f9a1f92b7870383559b988c1979c8396cbe7e357 not found: ID does not exist" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.418438 4763 scope.go:117] "RemoveContainer" containerID="1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c" Dec 06 08:23:14 crc kubenswrapper[4763]: E1206 08:23:14.419771 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c\": container with ID starting with 1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c not found: ID does not exist" containerID="1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.419811 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c"} err="failed to get container status \"1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c\": rpc error: code = NotFound desc = could not find container \"1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c\": container with ID starting with 1404b780a539a4ba0337fbaac353df36196dfcf5d2382db7c1d436fd201a2a1c not found: ID does not exist" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.419838 4763 scope.go:117] "RemoveContainer" containerID="c0e66a5d0b3ec6089825f434d8145482b5b8c235a74d8ce0969deecae13e0f21" Dec 06 08:23:14 crc kubenswrapper[4763]: E1206 08:23:14.420910 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0e66a5d0b3ec6089825f434d8145482b5b8c235a74d8ce0969deecae13e0f21\": container with ID starting with c0e66a5d0b3ec6089825f434d8145482b5b8c235a74d8ce0969deecae13e0f21 not found: ID does not exist" containerID="c0e66a5d0b3ec6089825f434d8145482b5b8c235a74d8ce0969deecae13e0f21" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.420940 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0e66a5d0b3ec6089825f434d8145482b5b8c235a74d8ce0969deecae13e0f21"} err="failed to get container status \"c0e66a5d0b3ec6089825f434d8145482b5b8c235a74d8ce0969deecae13e0f21\": rpc error: code = NotFound desc = could not 
find container \"c0e66a5d0b3ec6089825f434d8145482b5b8c235a74d8ce0969deecae13e0f21\": container with ID starting with c0e66a5d0b3ec6089825f434d8145482b5b8c235a74d8ce0969deecae13e0f21 not found: ID does not exist" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.461596 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35323f07-820a-4f29-9646-c7b859f6b8b7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.636821 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6czsc"] Dec 06 08:23:14 crc kubenswrapper[4763]: I1206 08:23:14.640616 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6czsc"] Dec 06 08:23:15 crc kubenswrapper[4763]: I1206 08:23:15.732637 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35323f07-820a-4f29-9646-c7b859f6b8b7" path="/var/lib/kubelet/pods/35323f07-820a-4f29-9646-c7b859f6b8b7/volumes" Dec 06 08:23:24 crc kubenswrapper[4763]: I1206 08:23:24.275455 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-pqqmw" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.825867 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm"] Dec 06 08:23:38 crc kubenswrapper[4763]: E1206 08:23:38.826656 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerName="registry-server" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.826674 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerName="registry-server" Dec 06 08:23:38 crc kubenswrapper[4763]: E1206 08:23:38.826692 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerName="extract-content" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.826700 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerName="extract-content" Dec 06 08:23:38 crc kubenswrapper[4763]: E1206 08:23:38.826710 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerName="extract-utilities" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.826717 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerName="extract-utilities" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.826847 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="35323f07-820a-4f29-9646-c7b859f6b8b7" containerName="registry-server" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.828290 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.834233 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.835500 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm"] Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.884297 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z776r\" (UniqueName: \"kubernetes.io/projected/ffa08e65-01fc-4524-b474-2e1f7193fa69-kube-api-access-z776r\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.884374 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.884421 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.985397 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.985506 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z776r\" (UniqueName: \"kubernetes.io/projected/ffa08e65-01fc-4524-b474-2e1f7193fa69-kube-api-access-z776r\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.985545 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.985827 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:38 crc kubenswrapper[4763]: I1206 08:23:38.985866 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:39 crc kubenswrapper[4763]: I1206 08:23:39.005450 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z776r\" (UniqueName: \"kubernetes.io/projected/ffa08e65-01fc-4524-b474-2e1f7193fa69-kube-api-access-z776r\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:39 crc kubenswrapper[4763]: I1206 08:23:39.147104 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:39 crc kubenswrapper[4763]: I1206 08:23:39.418565 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-7j4g2" podUID="1aeea93c-2fff-4930-b63f-cd11cda5d8a0" containerName="console" containerID="cri-o://cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2" gracePeriod=15 Dec 06 08:23:39 crc kubenswrapper[4763]: I1206 08:23:39.567108 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm"] Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.323517 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-7j4g2_1aeea93c-2fff-4930-b63f-cd11cda5d8a0/console/0.log" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.323877 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.405851 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-config\") pod \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.405973 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-service-ca\") pod \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.406044 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sqts\" (UniqueName: \"kubernetes.io/projected/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-kube-api-access-4sqts\") pod \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.406138 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-serving-cert\") pod \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.406214 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-oauth-config\") pod \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.406261 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-oauth-serving-cert\") pod \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.406341 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-trusted-ca-bundle\") pod \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\" (UID: \"1aeea93c-2fff-4930-b63f-cd11cda5d8a0\") " Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.406654 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-service-ca" (OuterVolumeSpecName: "service-ca") pod "1aeea93c-2fff-4930-b63f-cd11cda5d8a0" (UID: "1aeea93c-2fff-4930-b63f-cd11cda5d8a0"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.406941 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "1aeea93c-2fff-4930-b63f-cd11cda5d8a0" (UID: "1aeea93c-2fff-4930-b63f-cd11cda5d8a0"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.407078 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1aeea93c-2fff-4930-b63f-cd11cda5d8a0" (UID: "1aeea93c-2fff-4930-b63f-cd11cda5d8a0"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.407475 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-config" (OuterVolumeSpecName: "console-config") pod "1aeea93c-2fff-4930-b63f-cd11cda5d8a0" (UID: "1aeea93c-2fff-4930-b63f-cd11cda5d8a0"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.407550 4763 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-service-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.407759 4763 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.407774 4763 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.411837 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-kube-api-access-4sqts" (OuterVolumeSpecName: "kube-api-access-4sqts") pod "1aeea93c-2fff-4930-b63f-cd11cda5d8a0" (UID: "1aeea93c-2fff-4930-b63f-cd11cda5d8a0"). InnerVolumeSpecName "kube-api-access-4sqts". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.414979 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "1aeea93c-2fff-4930-b63f-cd11cda5d8a0" (UID: "1aeea93c-2fff-4930-b63f-cd11cda5d8a0"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.415421 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "1aeea93c-2fff-4930-b63f-cd11cda5d8a0" (UID: "1aeea93c-2fff-4930-b63f-cd11cda5d8a0"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.504166 4763 generic.go:334] "Generic (PLEG): container finished" podID="ffa08e65-01fc-4524-b474-2e1f7193fa69" containerID="992063b20100f4f3920731c96794ca4e85e6238dca22203feaf6d7a9517cb74c" exitCode=0 Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.504260 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" event={"ID":"ffa08e65-01fc-4524-b474-2e1f7193fa69","Type":"ContainerDied","Data":"992063b20100f4f3920731c96794ca4e85e6238dca22203feaf6d7a9517cb74c"} Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.504304 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" event={"ID":"ffa08e65-01fc-4524-b474-2e1f7193fa69","Type":"ContainerStarted","Data":"2af562e48898de324145876b542934561d8d40fbf3b78b58d6c38c04d5dfc8a6"} Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.505898 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-7j4g2_1aeea93c-2fff-4930-b63f-cd11cda5d8a0/console/0.log" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.505965 4763 generic.go:334] "Generic (PLEG): container finished" podID="1aeea93c-2fff-4930-b63f-cd11cda5d8a0" containerID="cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2" exitCode=2 Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.505990 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7j4g2" event={"ID":"1aeea93c-2fff-4930-b63f-cd11cda5d8a0","Type":"ContainerDied","Data":"cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2"} Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.506014 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7j4g2" event={"ID":"1aeea93c-2fff-4930-b63f-cd11cda5d8a0","Type":"ContainerDied","Data":"c2b9931cbc3e3bb0e60d5c7ac629049c903e03a14109e328226e09de4dceeb43"} Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.506035 4763 scope.go:117] "RemoveContainer" containerID="cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.506069 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-7j4g2" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.509192 4763 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.509210 4763 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.509218 4763 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-console-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.509227 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sqts\" (UniqueName: \"kubernetes.io/projected/1aeea93c-2fff-4930-b63f-cd11cda5d8a0-kube-api-access-4sqts\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.530654 4763 scope.go:117] "RemoveContainer" containerID="cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2" Dec 06 08:23:40 crc kubenswrapper[4763]: E1206 08:23:40.531369 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2\": container with ID starting with cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2 not found: ID does not exist" containerID="cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.531411 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2"} err="failed to get container status \"cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2\": rpc error: code = NotFound desc = could not find container \"cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2\": container with ID starting with cfffc79a2eab8b02f85cfefb7e808f5e8f5f359af4aaf2282f6b3b15de6512f2 not found: ID does not exist" Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.543097 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-7j4g2"] Dec 06 08:23:40 crc kubenswrapper[4763]: I1206 08:23:40.545468 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-7j4g2"] Dec 06 08:23:41 crc kubenswrapper[4763]: I1206 08:23:41.726200 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1aeea93c-2fff-4930-b63f-cd11cda5d8a0" path="/var/lib/kubelet/pods/1aeea93c-2fff-4930-b63f-cd11cda5d8a0/volumes" Dec 06 08:23:42 crc kubenswrapper[4763]: I1206 08:23:42.519409 4763 generic.go:334] "Generic (PLEG): container finished" podID="ffa08e65-01fc-4524-b474-2e1f7193fa69" containerID="84a09bdefed64c5afd96be7d1bedf260dfd0e309241d59e1cfcf6f021813c370" exitCode=0 Dec 06 08:23:42 crc kubenswrapper[4763]: I1206 08:23:42.519469 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" 
event={"ID":"ffa08e65-01fc-4524-b474-2e1f7193fa69","Type":"ContainerDied","Data":"84a09bdefed64c5afd96be7d1bedf260dfd0e309241d59e1cfcf6f021813c370"} Dec 06 08:23:42 crc kubenswrapper[4763]: I1206 08:23:42.537206 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:23:42 crc kubenswrapper[4763]: I1206 08:23:42.537263 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:23:43 crc kubenswrapper[4763]: I1206 08:23:43.533791 4763 generic.go:334] "Generic (PLEG): container finished" podID="ffa08e65-01fc-4524-b474-2e1f7193fa69" containerID="f9cb2d15e8584454628278a060206edc32b76040ed9ad6198afca23410ba1e1f" exitCode=0 Dec 06 08:23:43 crc kubenswrapper[4763]: I1206 08:23:43.533862 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" event={"ID":"ffa08e65-01fc-4524-b474-2e1f7193fa69","Type":"ContainerDied","Data":"f9cb2d15e8584454628278a060206edc32b76040ed9ad6198afca23410ba1e1f"} Dec 06 08:23:44 crc kubenswrapper[4763]: I1206 08:23:44.752236 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:44 crc kubenswrapper[4763]: I1206 08:23:44.856466 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-bundle\") pod \"ffa08e65-01fc-4524-b474-2e1f7193fa69\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " Dec 06 08:23:44 crc kubenswrapper[4763]: I1206 08:23:44.856647 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z776r\" (UniqueName: \"kubernetes.io/projected/ffa08e65-01fc-4524-b474-2e1f7193fa69-kube-api-access-z776r\") pod \"ffa08e65-01fc-4524-b474-2e1f7193fa69\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " Dec 06 08:23:44 crc kubenswrapper[4763]: I1206 08:23:44.856681 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-util\") pod \"ffa08e65-01fc-4524-b474-2e1f7193fa69\" (UID: \"ffa08e65-01fc-4524-b474-2e1f7193fa69\") " Dec 06 08:23:44 crc kubenswrapper[4763]: I1206 08:23:44.857666 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-bundle" (OuterVolumeSpecName: "bundle") pod "ffa08e65-01fc-4524-b474-2e1f7193fa69" (UID: "ffa08e65-01fc-4524-b474-2e1f7193fa69"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:23:44 crc kubenswrapper[4763]: I1206 08:23:44.861201 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffa08e65-01fc-4524-b474-2e1f7193fa69-kube-api-access-z776r" (OuterVolumeSpecName: "kube-api-access-z776r") pod "ffa08e65-01fc-4524-b474-2e1f7193fa69" (UID: "ffa08e65-01fc-4524-b474-2e1f7193fa69"). InnerVolumeSpecName "kube-api-access-z776r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:23:44 crc kubenswrapper[4763]: I1206 08:23:44.869805 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-util" (OuterVolumeSpecName: "util") pod "ffa08e65-01fc-4524-b474-2e1f7193fa69" (UID: "ffa08e65-01fc-4524-b474-2e1f7193fa69"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:23:44 crc kubenswrapper[4763]: I1206 08:23:44.958157 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z776r\" (UniqueName: \"kubernetes.io/projected/ffa08e65-01fc-4524-b474-2e1f7193fa69-kube-api-access-z776r\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:44 crc kubenswrapper[4763]: I1206 08:23:44.958191 4763 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-util\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:44 crc kubenswrapper[4763]: I1206 08:23:44.958202 4763 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ffa08e65-01fc-4524-b474-2e1f7193fa69-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:23:45 crc kubenswrapper[4763]: I1206 08:23:45.549263 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" event={"ID":"ffa08e65-01fc-4524-b474-2e1f7193fa69","Type":"ContainerDied","Data":"2af562e48898de324145876b542934561d8d40fbf3b78b58d6c38c04d5dfc8a6"} Dec 06 08:23:45 crc kubenswrapper[4763]: I1206 08:23:45.549589 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2af562e48898de324145876b542934561d8d40fbf3b78b58d6c38c04d5dfc8a6" Dec 06 08:23:45 crc kubenswrapper[4763]: I1206 08:23:45.549346 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.748947 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc"] Dec 06 08:23:53 crc kubenswrapper[4763]: E1206 08:23:53.751931 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffa08e65-01fc-4524-b474-2e1f7193fa69" containerName="util" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.751965 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffa08e65-01fc-4524-b474-2e1f7193fa69" containerName="util" Dec 06 08:23:53 crc kubenswrapper[4763]: E1206 08:23:53.751986 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffa08e65-01fc-4524-b474-2e1f7193fa69" containerName="extract" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.752001 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffa08e65-01fc-4524-b474-2e1f7193fa69" containerName="extract" Dec 06 08:23:53 crc kubenswrapper[4763]: E1206 08:23:53.752017 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aeea93c-2fff-4930-b63f-cd11cda5d8a0" containerName="console" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.752024 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aeea93c-2fff-4930-b63f-cd11cda5d8a0" containerName="console" Dec 06 08:23:53 crc kubenswrapper[4763]: E1206 08:23:53.752038 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffa08e65-01fc-4524-b474-2e1f7193fa69" containerName="pull" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.752045 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffa08e65-01fc-4524-b474-2e1f7193fa69" containerName="pull" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.752251 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffa08e65-01fc-4524-b474-2e1f7193fa69" containerName="extract" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.752331 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="1aeea93c-2fff-4930-b63f-cd11cda5d8a0" containerName="console" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.771312 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.775959 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.775972 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.776314 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-slblp" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.776489 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.776731 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.786760 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc"] Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.868548 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0834bbd3-fa5d-4e25-9c42-0597716b8d60-webhook-cert\") pod \"metallb-operator-controller-manager-c6d948bc6-6vpfc\" (UID: \"0834bbd3-fa5d-4e25-9c42-0597716b8d60\") " pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.868680 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0834bbd3-fa5d-4e25-9c42-0597716b8d60-apiservice-cert\") pod \"metallb-operator-controller-manager-c6d948bc6-6vpfc\" (UID: \"0834bbd3-fa5d-4e25-9c42-0597716b8d60\") " pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.868733 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g28c\" (UniqueName: \"kubernetes.io/projected/0834bbd3-fa5d-4e25-9c42-0597716b8d60-kube-api-access-2g28c\") pod \"metallb-operator-controller-manager-c6d948bc6-6vpfc\" (UID: \"0834bbd3-fa5d-4e25-9c42-0597716b8d60\") " pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.969405 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0834bbd3-fa5d-4e25-9c42-0597716b8d60-apiservice-cert\") pod \"metallb-operator-controller-manager-c6d948bc6-6vpfc\" (UID: \"0834bbd3-fa5d-4e25-9c42-0597716b8d60\") " pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.969480 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g28c\" (UniqueName: \"kubernetes.io/projected/0834bbd3-fa5d-4e25-9c42-0597716b8d60-kube-api-access-2g28c\") pod \"metallb-operator-controller-manager-c6d948bc6-6vpfc\" (UID: \"0834bbd3-fa5d-4e25-9c42-0597716b8d60\") " pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.969522 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0834bbd3-fa5d-4e25-9c42-0597716b8d60-webhook-cert\") pod \"metallb-operator-controller-manager-c6d948bc6-6vpfc\" (UID: \"0834bbd3-fa5d-4e25-9c42-0597716b8d60\") " pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.975207 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0834bbd3-fa5d-4e25-9c42-0597716b8d60-webhook-cert\") pod \"metallb-operator-controller-manager-c6d948bc6-6vpfc\" (UID: \"0834bbd3-fa5d-4e25-9c42-0597716b8d60\") " pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.980269 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0834bbd3-fa5d-4e25-9c42-0597716b8d60-apiservice-cert\") pod \"metallb-operator-controller-manager-c6d948bc6-6vpfc\" (UID: \"0834bbd3-fa5d-4e25-9c42-0597716b8d60\") " pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:53 crc kubenswrapper[4763]: I1206 08:23:53.997243 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g28c\" (UniqueName: \"kubernetes.io/projected/0834bbd3-fa5d-4e25-9c42-0597716b8d60-kube-api-access-2g28c\") pod \"metallb-operator-controller-manager-c6d948bc6-6vpfc\" (UID: \"0834bbd3-fa5d-4e25-9c42-0597716b8d60\") " pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.104947 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.108188 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz"] Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.108878 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.110816 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.111806 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.112673 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-tmxwd" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.159783 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz"] Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.171252 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/97adad42-5533-4a55-81ad-5b98cc51efb7-webhook-cert\") pod \"metallb-operator-webhook-server-5c5f4f877c-n99dz\" (UID: \"97adad42-5533-4a55-81ad-5b98cc51efb7\") " pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.171565 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/97adad42-5533-4a55-81ad-5b98cc51efb7-apiservice-cert\") pod \"metallb-operator-webhook-server-5c5f4f877c-n99dz\" (UID: \"97adad42-5533-4a55-81ad-5b98cc51efb7\") " pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.171602 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwfms\" (UniqueName: \"kubernetes.io/projected/97adad42-5533-4a55-81ad-5b98cc51efb7-kube-api-access-zwfms\") pod \"metallb-operator-webhook-server-5c5f4f877c-n99dz\" (UID: \"97adad42-5533-4a55-81ad-5b98cc51efb7\") " pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.272451 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/97adad42-5533-4a55-81ad-5b98cc51efb7-webhook-cert\") pod \"metallb-operator-webhook-server-5c5f4f877c-n99dz\" (UID: \"97adad42-5533-4a55-81ad-5b98cc51efb7\") " pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.272497 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/97adad42-5533-4a55-81ad-5b98cc51efb7-apiservice-cert\") pod \"metallb-operator-webhook-server-5c5f4f877c-n99dz\" (UID: \"97adad42-5533-4a55-81ad-5b98cc51efb7\") " pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.272527 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwfms\" (UniqueName: \"kubernetes.io/projected/97adad42-5533-4a55-81ad-5b98cc51efb7-kube-api-access-zwfms\") pod \"metallb-operator-webhook-server-5c5f4f877c-n99dz\" (UID: \"97adad42-5533-4a55-81ad-5b98cc51efb7\") " pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 
08:23:54.277250 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/97adad42-5533-4a55-81ad-5b98cc51efb7-webhook-cert\") pod \"metallb-operator-webhook-server-5c5f4f877c-n99dz\" (UID: \"97adad42-5533-4a55-81ad-5b98cc51efb7\") " pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.292686 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/97adad42-5533-4a55-81ad-5b98cc51efb7-apiservice-cert\") pod \"metallb-operator-webhook-server-5c5f4f877c-n99dz\" (UID: \"97adad42-5533-4a55-81ad-5b98cc51efb7\") " pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.299475 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwfms\" (UniqueName: \"kubernetes.io/projected/97adad42-5533-4a55-81ad-5b98cc51efb7-kube-api-access-zwfms\") pod \"metallb-operator-webhook-server-5c5f4f877c-n99dz\" (UID: \"97adad42-5533-4a55-81ad-5b98cc51efb7\") " pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.433072 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.454725 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc"] Dec 06 08:23:54 crc kubenswrapper[4763]: W1206 08:23:54.469699 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0834bbd3_fa5d_4e25_9c42_0597716b8d60.slice/crio-af8f22c2075f2a3a7851ad5abba64179a3a61ee82043a194fa6d18ed3f114ccb WatchSource:0}: Error finding container af8f22c2075f2a3a7851ad5abba64179a3a61ee82043a194fa6d18ed3f114ccb: Status 404 returned error can't find the container with id af8f22c2075f2a3a7851ad5abba64179a3a61ee82043a194fa6d18ed3f114ccb Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.609082 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" event={"ID":"0834bbd3-fa5d-4e25-9c42-0597716b8d60","Type":"ContainerStarted","Data":"af8f22c2075f2a3a7851ad5abba64179a3a61ee82043a194fa6d18ed3f114ccb"} Dec 06 08:23:54 crc kubenswrapper[4763]: I1206 08:23:54.853641 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz"] Dec 06 08:23:54 crc kubenswrapper[4763]: W1206 08:23:54.853977 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97adad42_5533_4a55_81ad_5b98cc51efb7.slice/crio-c5a65c94a152f1e439a5bf50e1603591dfe2901320f6bdacd6f648fb5f99da57 WatchSource:0}: Error finding container c5a65c94a152f1e439a5bf50e1603591dfe2901320f6bdacd6f648fb5f99da57: Status 404 returned error can't find the container with id c5a65c94a152f1e439a5bf50e1603591dfe2901320f6bdacd6f648fb5f99da57 Dec 06 08:23:55 crc kubenswrapper[4763]: I1206 08:23:55.616958 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" 
event={"ID":"97adad42-5533-4a55-81ad-5b98cc51efb7","Type":"ContainerStarted","Data":"c5a65c94a152f1e439a5bf50e1603591dfe2901320f6bdacd6f648fb5f99da57"} Dec 06 08:23:58 crc kubenswrapper[4763]: I1206 08:23:58.652254 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" event={"ID":"0834bbd3-fa5d-4e25-9c42-0597716b8d60","Type":"ContainerStarted","Data":"ecac441e3f4a24c517a136135955d0208d2c3f0dddbb47a699cd534ccb4d28e0"} Dec 06 08:23:58 crc kubenswrapper[4763]: I1206 08:23:58.653534 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:23:58 crc kubenswrapper[4763]: I1206 08:23:58.673567 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" podStartSLOduration=2.6281397760000003 podStartE2EDuration="5.67354618s" podCreationTimestamp="2025-12-06 08:23:53 +0000 UTC" firstStartedPulling="2025-12-06 08:23:54.473409132 +0000 UTC m=+717.049114170" lastFinishedPulling="2025-12-06 08:23:57.518815536 +0000 UTC m=+720.094520574" observedRunningTime="2025-12-06 08:23:58.669657957 +0000 UTC m=+721.245362995" watchObservedRunningTime="2025-12-06 08:23:58.67354618 +0000 UTC m=+721.249251218" Dec 06 08:24:00 crc kubenswrapper[4763]: I1206 08:24:00.669274 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" event={"ID":"97adad42-5533-4a55-81ad-5b98cc51efb7","Type":"ContainerStarted","Data":"3ea89f4c1d46f56dd608ff33a30f9ea310393a36df68268eb58ab6c2e1d0cb2b"} Dec 06 08:24:00 crc kubenswrapper[4763]: I1206 08:24:00.669412 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:24:00 crc kubenswrapper[4763]: I1206 08:24:00.694185 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" podStartSLOduration=1.890810458 podStartE2EDuration="6.69416881s" podCreationTimestamp="2025-12-06 08:23:54 +0000 UTC" firstStartedPulling="2025-12-06 08:23:54.855844384 +0000 UTC m=+717.431549422" lastFinishedPulling="2025-12-06 08:23:59.659202746 +0000 UTC m=+722.234907774" observedRunningTime="2025-12-06 08:24:00.689483026 +0000 UTC m=+723.265188074" watchObservedRunningTime="2025-12-06 08:24:00.69416881 +0000 UTC m=+723.269873848" Dec 06 08:24:12 crc kubenswrapper[4763]: I1206 08:24:12.537167 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:24:12 crc kubenswrapper[4763]: I1206 08:24:12.537891 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:24:14 crc kubenswrapper[4763]: I1206 08:24:14.454280 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5c5f4f877c-n99dz" Dec 06 08:24:34 crc kubenswrapper[4763]: I1206 
08:24:34.107444 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-c6d948bc6-6vpfc" Dec 06 08:24:34 crc kubenswrapper[4763]: I1206 08:24:34.950578 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-4lrrp"] Dec 06 08:24:34 crc kubenswrapper[4763]: I1206 08:24:34.962923 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw"] Dec 06 08:24:34 crc kubenswrapper[4763]: I1206 08:24:34.963312 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:34 crc kubenswrapper[4763]: I1206 08:24:34.973128 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" Dec 06 08:24:34 crc kubenswrapper[4763]: I1206 08:24:34.975227 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw"] Dec 06 08:24:34 crc kubenswrapper[4763]: I1206 08:24:34.979491 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 06 08:24:34 crc kubenswrapper[4763]: I1206 08:24:34.979506 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 06 08:24:34 crc kubenswrapper[4763]: I1206 08:24:34.979714 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-g6d5c" Dec 06 08:24:34 crc kubenswrapper[4763]: I1206 08:24:34.980051 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.036968 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-mkf7x"] Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.037882 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.040668 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-pb7q5" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.040680 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.040704 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.043333 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.052046 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-ttjcf"] Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.053258 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.055175 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.069169 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-ttjcf"] Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.081022 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/891a4eae-46dc-4ae1-bd31-d04889c9647e-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-ppdsw\" (UID: \"891a4eae-46dc-4ae1-bd31-d04889c9647e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.081076 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/3b9d7e71-435f-4f24-9686-436f44603eee-frr-startup\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.081109 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3b9d7e71-435f-4f24-9686-436f44603eee-metrics-certs\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.081151 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-metrics\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.081180 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-reloader\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.081213 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-frr-sockets\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.081237 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-frr-conf\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.081275 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqcn2\" (UniqueName: \"kubernetes.io/projected/3b9d7e71-435f-4f24-9686-436f44603eee-kube-api-access-fqcn2\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.081326 4763 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f94lz\" (UniqueName: \"kubernetes.io/projected/891a4eae-46dc-4ae1-bd31-d04889c9647e-kube-api-access-f94lz\") pod \"frr-k8s-webhook-server-7fcb986d4-ppdsw\" (UID: \"891a4eae-46dc-4ae1-bd31-d04889c9647e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183032 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqcn2\" (UniqueName: \"kubernetes.io/projected/3b9d7e71-435f-4f24-9686-436f44603eee-kube-api-access-fqcn2\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183102 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0446ab0f-7545-4953-b283-4d8edab363f5-metallb-excludel2\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183133 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cknf\" (UniqueName: \"kubernetes.io/projected/bba5d03f-2f3a-48e7-8c8a-dd5531a680b4-kube-api-access-7cknf\") pod \"controller-f8648f98b-ttjcf\" (UID: \"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4\") " pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183161 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bba5d03f-2f3a-48e7-8c8a-dd5531a680b4-cert\") pod \"controller-f8648f98b-ttjcf\" (UID: \"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4\") " pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183193 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f94lz\" (UniqueName: \"kubernetes.io/projected/891a4eae-46dc-4ae1-bd31-d04889c9647e-kube-api-access-f94lz\") pod \"frr-k8s-webhook-server-7fcb986d4-ppdsw\" (UID: \"891a4eae-46dc-4ae1-bd31-d04889c9647e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183293 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bba5d03f-2f3a-48e7-8c8a-dd5531a680b4-metrics-certs\") pod \"controller-f8648f98b-ttjcf\" (UID: \"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4\") " pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183344 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/891a4eae-46dc-4ae1-bd31-d04889c9647e-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-ppdsw\" (UID: \"891a4eae-46dc-4ae1-bd31-d04889c9647e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183407 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/3b9d7e71-435f-4f24-9686-436f44603eee-frr-startup\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 
crc kubenswrapper[4763]: I1206 08:24:35.183454 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8bwr\" (UniqueName: \"kubernetes.io/projected/0446ab0f-7545-4953-b283-4d8edab363f5-kube-api-access-z8bwr\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183491 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3b9d7e71-435f-4f24-9686-436f44603eee-metrics-certs\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183532 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-metrics\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183680 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-metrics-certs\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183779 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-reloader\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183822 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-memberlist\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183880 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-frr-sockets\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.183949 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-frr-conf\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.184174 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-metrics\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.184201 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-reloader\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " 
pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.184453 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-frr-sockets\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.184629 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/3b9d7e71-435f-4f24-9686-436f44603eee-frr-conf\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.184646 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/3b9d7e71-435f-4f24-9686-436f44603eee-frr-startup\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.190939 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3b9d7e71-435f-4f24-9686-436f44603eee-metrics-certs\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.191421 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/891a4eae-46dc-4ae1-bd31-d04889c9647e-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-ppdsw\" (UID: \"891a4eae-46dc-4ae1-bd31-d04889c9647e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.202667 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqcn2\" (UniqueName: \"kubernetes.io/projected/3b9d7e71-435f-4f24-9686-436f44603eee-kube-api-access-fqcn2\") pod \"frr-k8s-4lrrp\" (UID: \"3b9d7e71-435f-4f24-9686-436f44603eee\") " pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.211474 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f94lz\" (UniqueName: \"kubernetes.io/projected/891a4eae-46dc-4ae1-bd31-d04889c9647e-kube-api-access-f94lz\") pod \"frr-k8s-webhook-server-7fcb986d4-ppdsw\" (UID: \"891a4eae-46dc-4ae1-bd31-d04889c9647e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.285292 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8bwr\" (UniqueName: \"kubernetes.io/projected/0446ab0f-7545-4953-b283-4d8edab363f5-kube-api-access-z8bwr\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.285341 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-metrics-certs\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.285369 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: 
\"kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-memberlist\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: E1206 08:24:35.285529 4763 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 06 08:24:35 crc kubenswrapper[4763]: E1206 08:24:35.285597 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-memberlist podName:0446ab0f-7545-4953-b283-4d8edab363f5 nodeName:}" failed. No retries permitted until 2025-12-06 08:24:35.785579863 +0000 UTC m=+758.361284901 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-memberlist") pod "speaker-mkf7x" (UID: "0446ab0f-7545-4953-b283-4d8edab363f5") : secret "metallb-memberlist" not found Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.285882 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0446ab0f-7545-4953-b283-4d8edab363f5-metallb-excludel2\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.285925 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cknf\" (UniqueName: \"kubernetes.io/projected/bba5d03f-2f3a-48e7-8c8a-dd5531a680b4-kube-api-access-7cknf\") pod \"controller-f8648f98b-ttjcf\" (UID: \"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4\") " pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.285943 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bba5d03f-2f3a-48e7-8c8a-dd5531a680b4-cert\") pod \"controller-f8648f98b-ttjcf\" (UID: \"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4\") " pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.285966 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bba5d03f-2f3a-48e7-8c8a-dd5531a680b4-metrics-certs\") pod \"controller-f8648f98b-ttjcf\" (UID: \"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4\") " pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.286532 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0446ab0f-7545-4953-b283-4d8edab363f5-metallb-excludel2\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.294559 4763 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.294683 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-metrics-certs\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.295103 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/bba5d03f-2f3a-48e7-8c8a-dd5531a680b4-metrics-certs\") pod \"controller-f8648f98b-ttjcf\" (UID: \"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4\") " pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.302526 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bba5d03f-2f3a-48e7-8c8a-dd5531a680b4-cert\") pod \"controller-f8648f98b-ttjcf\" (UID: \"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4\") " pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.302763 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cknf\" (UniqueName: \"kubernetes.io/projected/bba5d03f-2f3a-48e7-8c8a-dd5531a680b4-kube-api-access-7cknf\") pod \"controller-f8648f98b-ttjcf\" (UID: \"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4\") " pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.302828 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.314678 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.319595 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8bwr\" (UniqueName: \"kubernetes.io/projected/0446ab0f-7545-4953-b283-4d8edab363f5-kube-api-access-z8bwr\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.371314 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.603734 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw"] Dec 06 08:24:35 crc kubenswrapper[4763]: W1206 08:24:35.613011 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod891a4eae_46dc_4ae1_bd31_d04889c9647e.slice/crio-ec2f1ad4b66b4d8f6202abe40ba039993bb63cb8209353c114fef0e058be3250 WatchSource:0}: Error finding container ec2f1ad4b66b4d8f6202abe40ba039993bb63cb8209353c114fef0e058be3250: Status 404 returned error can't find the container with id ec2f1ad4b66b4d8f6202abe40ba039993bb63cb8209353c114fef0e058be3250 Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.673008 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-ttjcf"] Dec 06 08:24:35 crc kubenswrapper[4763]: W1206 08:24:35.676691 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbba5d03f_2f3a_48e7_8c8a_dd5531a680b4.slice/crio-1da655efc1fbc80bb7e20cd8fb64dc94fe7c963cec3de3409601c507d126d509 WatchSource:0}: Error finding container 1da655efc1fbc80bb7e20cd8fb64dc94fe7c963cec3de3409601c507d126d509: Status 404 returned error can't find the container with id 1da655efc1fbc80bb7e20cd8fb64dc94fe7c963cec3de3409601c507d126d509 Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.800111 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-memberlist\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:35 crc kubenswrapper[4763]: E1206 08:24:35.800225 4763 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 06 08:24:35 crc kubenswrapper[4763]: E1206 08:24:35.800753 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-memberlist podName:0446ab0f-7545-4953-b283-4d8edab363f5 nodeName:}" failed. No retries permitted until 2025-12-06 08:24:36.800730798 +0000 UTC m=+759.376435836 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-memberlist") pod "speaker-mkf7x" (UID: "0446ab0f-7545-4953-b283-4d8edab363f5") : secret "metallb-memberlist" not found Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.870498 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-ttjcf" event={"ID":"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4","Type":"ContainerStarted","Data":"675a600b1e45f0abf9a0172829ca147c7a29bc3632e0d93f9f42b1e0387ae21f"} Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.870551 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-ttjcf" event={"ID":"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4","Type":"ContainerStarted","Data":"1da655efc1fbc80bb7e20cd8fb64dc94fe7c963cec3de3409601c507d126d509"} Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.872005 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" event={"ID":"891a4eae-46dc-4ae1-bd31-d04889c9647e","Type":"ContainerStarted","Data":"ec2f1ad4b66b4d8f6202abe40ba039993bb63cb8209353c114fef0e058be3250"} Dec 06 08:24:35 crc kubenswrapper[4763]: I1206 08:24:35.872969 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lrrp" event={"ID":"3b9d7e71-435f-4f24-9686-436f44603eee","Type":"ContainerStarted","Data":"6255753dd2bd3850c614dd7c4cf44dfe41cde86cd7d638973fce40a647f05aa0"} Dec 06 08:24:36 crc kubenswrapper[4763]: I1206 08:24:36.812794 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-memberlist\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:36 crc kubenswrapper[4763]: I1206 08:24:36.818657 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0446ab0f-7545-4953-b283-4d8edab363f5-memberlist\") pod \"speaker-mkf7x\" (UID: \"0446ab0f-7545-4953-b283-4d8edab363f5\") " pod="metallb-system/speaker-mkf7x" Dec 06 08:24:36 crc kubenswrapper[4763]: I1206 08:24:36.855062 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-mkf7x" Dec 06 08:24:36 crc kubenswrapper[4763]: W1206 08:24:36.890950 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0446ab0f_7545_4953_b283_4d8edab363f5.slice/crio-c2f2ce8b648c9250fe435a1abcb8f0bf8aaa1b2a52d7c9614259b0ecf5096e62 WatchSource:0}: Error finding container c2f2ce8b648c9250fe435a1abcb8f0bf8aaa1b2a52d7c9614259b0ecf5096e62: Status 404 returned error can't find the container with id c2f2ce8b648c9250fe435a1abcb8f0bf8aaa1b2a52d7c9614259b0ecf5096e62 Dec 06 08:24:36 crc kubenswrapper[4763]: I1206 08:24:36.897173 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-ttjcf" event={"ID":"bba5d03f-2f3a-48e7-8c8a-dd5531a680b4","Type":"ContainerStarted","Data":"322c040474a28973400088955f33f79a8901e8f934c830dd48ebd25158021f1b"} Dec 06 08:24:36 crc kubenswrapper[4763]: I1206 08:24:36.897313 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:36 crc kubenswrapper[4763]: I1206 08:24:36.917269 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-ttjcf" podStartSLOduration=1.917247187 podStartE2EDuration="1.917247187s" podCreationTimestamp="2025-12-06 08:24:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:24:36.910482237 +0000 UTC m=+759.486187295" watchObservedRunningTime="2025-12-06 08:24:36.917247187 +0000 UTC m=+759.492952225" Dec 06 08:24:37 crc kubenswrapper[4763]: I1206 08:24:37.909459 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-mkf7x" event={"ID":"0446ab0f-7545-4953-b283-4d8edab363f5","Type":"ContainerStarted","Data":"84b6ae2ef09f0e8b59595ffd94b76dc52892a1fe52784b36d406446ad63137ca"} Dec 06 08:24:37 crc kubenswrapper[4763]: I1206 08:24:37.909512 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-mkf7x" event={"ID":"0446ab0f-7545-4953-b283-4d8edab363f5","Type":"ContainerStarted","Data":"17e522f49e8154cd46140488c2940e2058412f455d466dbfb6bffc9bf9010bd4"} Dec 06 08:24:37 crc kubenswrapper[4763]: I1206 08:24:37.909524 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-mkf7x" event={"ID":"0446ab0f-7545-4953-b283-4d8edab363f5","Type":"ContainerStarted","Data":"c2f2ce8b648c9250fe435a1abcb8f0bf8aaa1b2a52d7c9614259b0ecf5096e62"} Dec 06 08:24:37 crc kubenswrapper[4763]: I1206 08:24:37.909705 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-mkf7x" Dec 06 08:24:37 crc kubenswrapper[4763]: I1206 08:24:37.931371 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-mkf7x" podStartSLOduration=2.931354618 podStartE2EDuration="2.931354618s" podCreationTimestamp="2025-12-06 08:24:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:24:37.926975182 +0000 UTC m=+760.502680220" watchObservedRunningTime="2025-12-06 08:24:37.931354618 +0000 UTC m=+760.507059656" Dec 06 08:24:42 crc kubenswrapper[4763]: I1206 08:24:42.595783 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:24:42 crc kubenswrapper[4763]: I1206 08:24:42.596441 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:24:42 crc kubenswrapper[4763]: I1206 08:24:42.596489 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:24:42 crc kubenswrapper[4763]: I1206 08:24:42.597007 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1300a82f37d1d362b5ee04ae557ec46ab85297772068f4f4d8becb8428ab897a"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 08:24:42 crc kubenswrapper[4763]: I1206 08:24:42.597058 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://1300a82f37d1d362b5ee04ae557ec46ab85297772068f4f4d8becb8428ab897a" gracePeriod=600 Dec 06 08:24:42 crc kubenswrapper[4763]: I1206 08:24:42.978674 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"1300a82f37d1d362b5ee04ae557ec46ab85297772068f4f4d8becb8428ab897a"} Dec 06 08:24:42 crc kubenswrapper[4763]: I1206 08:24:42.978810 4763 scope.go:117] "RemoveContainer" containerID="ffbb33058c1c9f7b77a07ef9a3e30aac957b2a1237a5325b0fa61d5ce390660f" Dec 06 08:24:42 crc kubenswrapper[4763]: I1206 08:24:42.978582 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="1300a82f37d1d362b5ee04ae557ec46ab85297772068f4f4d8becb8428ab897a" exitCode=0 Dec 06 08:24:45 crc kubenswrapper[4763]: I1206 08:24:45.006261 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" event={"ID":"891a4eae-46dc-4ae1-bd31-d04889c9647e","Type":"ContainerStarted","Data":"9e2c36b030c886832559b52d1869c70e7a08b468ca4608cd3cfafae5ec79ec26"} Dec 06 08:24:45 crc kubenswrapper[4763]: I1206 08:24:45.006863 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" Dec 06 08:24:45 crc kubenswrapper[4763]: I1206 08:24:45.010212 4763 generic.go:334] "Generic (PLEG): container finished" podID="3b9d7e71-435f-4f24-9686-436f44603eee" containerID="a8438ff04385845d56122085cbb5b73e0997f4cf8f0605a994b6f092f072b9e7" exitCode=0 Dec 06 08:24:45 crc kubenswrapper[4763]: I1206 08:24:45.010845 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lrrp" event={"ID":"3b9d7e71-435f-4f24-9686-436f44603eee","Type":"ContainerDied","Data":"a8438ff04385845d56122085cbb5b73e0997f4cf8f0605a994b6f092f072b9e7"} Dec 06 08:24:45 crc kubenswrapper[4763]: I1206 08:24:45.014462 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"7d94fae00dffdf507a33769e3c6f2fe9c1acc5dc734ee30a31695ce80e2528cc"} Dec 06 08:24:45 crc kubenswrapper[4763]: I1206 08:24:45.031219 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" podStartSLOduration=2.116356136 podStartE2EDuration="11.031198722s" podCreationTimestamp="2025-12-06 08:24:34 +0000 UTC" firstStartedPulling="2025-12-06 08:24:35.61472531 +0000 UTC m=+758.190430348" lastFinishedPulling="2025-12-06 08:24:44.529567896 +0000 UTC m=+767.105272934" observedRunningTime="2025-12-06 08:24:45.021200867 +0000 UTC m=+767.596905915" watchObservedRunningTime="2025-12-06 08:24:45.031198722 +0000 UTC m=+767.606903760" Dec 06 08:24:45 crc kubenswrapper[4763]: I1206 08:24:45.376704 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-ttjcf" Dec 06 08:24:46 crc kubenswrapper[4763]: I1206 08:24:46.025966 4763 generic.go:334] "Generic (PLEG): container finished" podID="3b9d7e71-435f-4f24-9686-436f44603eee" containerID="680f0d6401fa81906aa0f003d442a5789d0807fec93689a5fb510b3dd3da546d" exitCode=0 Dec 06 08:24:46 crc kubenswrapper[4763]: I1206 08:24:46.026050 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lrrp" event={"ID":"3b9d7e71-435f-4f24-9686-436f44603eee","Type":"ContainerDied","Data":"680f0d6401fa81906aa0f003d442a5789d0807fec93689a5fb510b3dd3da546d"} Dec 06 08:24:47 crc kubenswrapper[4763]: I1206 08:24:47.037136 4763 generic.go:334] "Generic (PLEG): container finished" podID="3b9d7e71-435f-4f24-9686-436f44603eee" containerID="8d0925be8ab57dd329dde67d665d77b0038e351015a9cb89e964887ed87fb7e3" exitCode=0 Dec 06 08:24:47 crc kubenswrapper[4763]: I1206 08:24:47.037258 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lrrp" event={"ID":"3b9d7e71-435f-4f24-9686-436f44603eee","Type":"ContainerDied","Data":"8d0925be8ab57dd329dde67d665d77b0038e351015a9cb89e964887ed87fb7e3"} Dec 06 08:24:48 crc kubenswrapper[4763]: I1206 08:24:48.048658 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lrrp" event={"ID":"3b9d7e71-435f-4f24-9686-436f44603eee","Type":"ContainerStarted","Data":"5da3ca6884fcf8c9e1ab19fd8bc62ba55c0b853e677fc1c9ad31bb21bc3e8614"} Dec 06 08:24:48 crc kubenswrapper[4763]: I1206 08:24:48.048709 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lrrp" event={"ID":"3b9d7e71-435f-4f24-9686-436f44603eee","Type":"ContainerStarted","Data":"aceee7058b78f54bfc8507f147d64b66ce5250d1eb7fb81e7fb5efdb25758ca4"} Dec 06 08:24:48 crc kubenswrapper[4763]: I1206 08:24:48.048723 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lrrp" event={"ID":"3b9d7e71-435f-4f24-9686-436f44603eee","Type":"ContainerStarted","Data":"d72022acb5f75ae9fd8cafb91792290ba8d0b2b46ef1c064118b0598a7732ef7"} Dec 06 08:24:48 crc kubenswrapper[4763]: I1206 08:24:48.048734 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lrrp" event={"ID":"3b9d7e71-435f-4f24-9686-436f44603eee","Type":"ContainerStarted","Data":"7abe8a0a80fcce69d6a44798f358da0f626e4f02960773b62a81085fbcfbbde3"} Dec 06 08:24:48 crc kubenswrapper[4763]: I1206 08:24:48.048745 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lrrp" 
event={"ID":"3b9d7e71-435f-4f24-9686-436f44603eee","Type":"ContainerStarted","Data":"2701bfdd442089c30068ab10a271a2d694e18121a20933ed981ff3bd3fbc47dd"} Dec 06 08:24:48 crc kubenswrapper[4763]: I1206 08:24:48.048755 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4lrrp" event={"ID":"3b9d7e71-435f-4f24-9686-436f44603eee","Type":"ContainerStarted","Data":"92e947e7901bb03c7d885f5f07e74b4af164955eb9a1a56f3ddaa492dcd9093d"} Dec 06 08:24:48 crc kubenswrapper[4763]: I1206 08:24:48.048862 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:48 crc kubenswrapper[4763]: I1206 08:24:48.071610 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-4lrrp" podStartSLOduration=5.081225443 podStartE2EDuration="14.071594334s" podCreationTimestamp="2025-12-06 08:24:34 +0000 UTC" firstStartedPulling="2025-12-06 08:24:35.503147278 +0000 UTC m=+758.078852316" lastFinishedPulling="2025-12-06 08:24:44.493516179 +0000 UTC m=+767.069221207" observedRunningTime="2025-12-06 08:24:48.071104571 +0000 UTC m=+770.646809619" watchObservedRunningTime="2025-12-06 08:24:48.071594334 +0000 UTC m=+770.647299372" Dec 06 08:24:50 crc kubenswrapper[4763]: I1206 08:24:50.304126 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:50 crc kubenswrapper[4763]: I1206 08:24:50.342857 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:24:55 crc kubenswrapper[4763]: I1206 08:24:55.327031 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-ppdsw" Dec 06 08:24:56 crc kubenswrapper[4763]: I1206 08:24:56.858073 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-mkf7x" Dec 06 08:25:00 crc kubenswrapper[4763]: I1206 08:25:00.037830 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-g72tt"] Dec 06 08:25:00 crc kubenswrapper[4763]: I1206 08:25:00.039103 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-g72tt" Dec 06 08:25:00 crc kubenswrapper[4763]: I1206 08:25:00.042804 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-sjjfh" Dec 06 08:25:00 crc kubenswrapper[4763]: I1206 08:25:00.042943 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 06 08:25:00 crc kubenswrapper[4763]: I1206 08:25:00.043550 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 06 08:25:00 crc kubenswrapper[4763]: I1206 08:25:00.060992 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-g72tt"] Dec 06 08:25:00 crc kubenswrapper[4763]: I1206 08:25:00.080812 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq5g9\" (UniqueName: \"kubernetes.io/projected/9750f203-ba5a-4a59-b70f-9a2793abe5c8-kube-api-access-tq5g9\") pod \"openstack-operator-index-g72tt\" (UID: \"9750f203-ba5a-4a59-b70f-9a2793abe5c8\") " pod="openstack-operators/openstack-operator-index-g72tt" Dec 06 08:25:00 crc kubenswrapper[4763]: I1206 08:25:00.185566 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq5g9\" (UniqueName: \"kubernetes.io/projected/9750f203-ba5a-4a59-b70f-9a2793abe5c8-kube-api-access-tq5g9\") pod \"openstack-operator-index-g72tt\" (UID: \"9750f203-ba5a-4a59-b70f-9a2793abe5c8\") " pod="openstack-operators/openstack-operator-index-g72tt" Dec 06 08:25:00 crc kubenswrapper[4763]: I1206 08:25:00.205140 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq5g9\" (UniqueName: \"kubernetes.io/projected/9750f203-ba5a-4a59-b70f-9a2793abe5c8-kube-api-access-tq5g9\") pod \"openstack-operator-index-g72tt\" (UID: \"9750f203-ba5a-4a59-b70f-9a2793abe5c8\") " pod="openstack-operators/openstack-operator-index-g72tt" Dec 06 08:25:00 crc kubenswrapper[4763]: I1206 08:25:00.354049 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-g72tt" Dec 06 08:25:00 crc kubenswrapper[4763]: W1206 08:25:00.739944 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9750f203_ba5a_4a59_b70f_9a2793abe5c8.slice/crio-4f25a9022c041149e1fe95dad47da74e5deaf405ad55867e6b6905aebaa9bb9e WatchSource:0}: Error finding container 4f25a9022c041149e1fe95dad47da74e5deaf405ad55867e6b6905aebaa9bb9e: Status 404 returned error can't find the container with id 4f25a9022c041149e1fe95dad47da74e5deaf405ad55867e6b6905aebaa9bb9e Dec 06 08:25:00 crc kubenswrapper[4763]: I1206 08:25:00.740725 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-g72tt"] Dec 06 08:25:01 crc kubenswrapper[4763]: I1206 08:25:01.140181 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-g72tt" event={"ID":"9750f203-ba5a-4a59-b70f-9a2793abe5c8","Type":"ContainerStarted","Data":"4f25a9022c041149e1fe95dad47da74e5deaf405ad55867e6b6905aebaa9bb9e"} Dec 06 08:25:03 crc kubenswrapper[4763]: I1206 08:25:03.414513 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-g72tt"] Dec 06 08:25:04 crc kubenswrapper[4763]: I1206 08:25:04.018713 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-8fkl9"] Dec 06 08:25:04 crc kubenswrapper[4763]: I1206 08:25:04.019932 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-8fkl9" Dec 06 08:25:04 crc kubenswrapper[4763]: I1206 08:25:04.030100 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-8fkl9"] Dec 06 08:25:04 crc kubenswrapper[4763]: I1206 08:25:04.041704 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvdf9\" (UniqueName: \"kubernetes.io/projected/a5d6c3cb-d635-4b17-b52a-d20eb3286ac2-kube-api-access-mvdf9\") pod \"openstack-operator-index-8fkl9\" (UID: \"a5d6c3cb-d635-4b17-b52a-d20eb3286ac2\") " pod="openstack-operators/openstack-operator-index-8fkl9" Dec 06 08:25:04 crc kubenswrapper[4763]: I1206 08:25:04.143210 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvdf9\" (UniqueName: \"kubernetes.io/projected/a5d6c3cb-d635-4b17-b52a-d20eb3286ac2-kube-api-access-mvdf9\") pod \"openstack-operator-index-8fkl9\" (UID: \"a5d6c3cb-d635-4b17-b52a-d20eb3286ac2\") " pod="openstack-operators/openstack-operator-index-8fkl9" Dec 06 08:25:04 crc kubenswrapper[4763]: I1206 08:25:04.161949 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvdf9\" (UniqueName: \"kubernetes.io/projected/a5d6c3cb-d635-4b17-b52a-d20eb3286ac2-kube-api-access-mvdf9\") pod \"openstack-operator-index-8fkl9\" (UID: \"a5d6c3cb-d635-4b17-b52a-d20eb3286ac2\") " pod="openstack-operators/openstack-operator-index-8fkl9" Dec 06 08:25:04 crc kubenswrapper[4763]: I1206 08:25:04.173188 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-g72tt" event={"ID":"9750f203-ba5a-4a59-b70f-9a2793abe5c8","Type":"ContainerStarted","Data":"e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e"} Dec 06 08:25:04 crc kubenswrapper[4763]: I1206 08:25:04.190048 4763 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack-operators/openstack-operator-index-g72tt" podStartSLOduration=1.663843749 podStartE2EDuration="4.19002367s" podCreationTimestamp="2025-12-06 08:25:00 +0000 UTC" firstStartedPulling="2025-12-06 08:25:00.742397337 +0000 UTC m=+783.318102395" lastFinishedPulling="2025-12-06 08:25:03.268577278 +0000 UTC m=+785.844282316" observedRunningTime="2025-12-06 08:25:04.186056565 +0000 UTC m=+786.761761603" watchObservedRunningTime="2025-12-06 08:25:04.19002367 +0000 UTC m=+786.765728708" Dec 06 08:25:04 crc kubenswrapper[4763]: I1206 08:25:04.340887 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-8fkl9" Dec 06 08:25:04 crc kubenswrapper[4763]: I1206 08:25:04.784990 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-8fkl9"] Dec 06 08:25:05 crc kubenswrapper[4763]: I1206 08:25:05.180322 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8fkl9" event={"ID":"a5d6c3cb-d635-4b17-b52a-d20eb3286ac2","Type":"ContainerStarted","Data":"d47be7a492b1bc7a3ca3c887c36c1d48bb2215b7931c3cfcd9d648615301eeea"} Dec 06 08:25:05 crc kubenswrapper[4763]: I1206 08:25:05.180921 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8fkl9" event={"ID":"a5d6c3cb-d635-4b17-b52a-d20eb3286ac2","Type":"ContainerStarted","Data":"134c0ab23e554359a40bd9bf4ce90109bf793d850e18f4e4b814c095a3399228"} Dec 06 08:25:05 crc kubenswrapper[4763]: I1206 08:25:05.180373 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-g72tt" podUID="9750f203-ba5a-4a59-b70f-9a2793abe5c8" containerName="registry-server" containerID="cri-o://e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e" gracePeriod=2 Dec 06 08:25:05 crc kubenswrapper[4763]: I1206 08:25:05.203335 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-8fkl9" podStartSLOduration=1.128952744 podStartE2EDuration="1.203316888s" podCreationTimestamp="2025-12-06 08:25:04 +0000 UTC" firstStartedPulling="2025-12-06 08:25:04.796282903 +0000 UTC m=+787.371987941" lastFinishedPulling="2025-12-06 08:25:04.870647047 +0000 UTC m=+787.446352085" observedRunningTime="2025-12-06 08:25:05.199563689 +0000 UTC m=+787.775268747" watchObservedRunningTime="2025-12-06 08:25:05.203316888 +0000 UTC m=+787.779021926" Dec 06 08:25:05 crc kubenswrapper[4763]: I1206 08:25:05.307612 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-4lrrp" Dec 06 08:25:05 crc kubenswrapper[4763]: I1206 08:25:05.543920 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-g72tt" Dec 06 08:25:05 crc kubenswrapper[4763]: I1206 08:25:05.558956 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tq5g9\" (UniqueName: \"kubernetes.io/projected/9750f203-ba5a-4a59-b70f-9a2793abe5c8-kube-api-access-tq5g9\") pod \"9750f203-ba5a-4a59-b70f-9a2793abe5c8\" (UID: \"9750f203-ba5a-4a59-b70f-9a2793abe5c8\") " Dec 06 08:25:05 crc kubenswrapper[4763]: I1206 08:25:05.568312 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9750f203-ba5a-4a59-b70f-9a2793abe5c8-kube-api-access-tq5g9" (OuterVolumeSpecName: "kube-api-access-tq5g9") pod "9750f203-ba5a-4a59-b70f-9a2793abe5c8" (UID: "9750f203-ba5a-4a59-b70f-9a2793abe5c8"). InnerVolumeSpecName "kube-api-access-tq5g9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:25:05 crc kubenswrapper[4763]: I1206 08:25:05.660589 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tq5g9\" (UniqueName: \"kubernetes.io/projected/9750f203-ba5a-4a59-b70f-9a2793abe5c8-kube-api-access-tq5g9\") on node \"crc\" DevicePath \"\"" Dec 06 08:25:06 crc kubenswrapper[4763]: I1206 08:25:06.191890 4763 generic.go:334] "Generic (PLEG): container finished" podID="9750f203-ba5a-4a59-b70f-9a2793abe5c8" containerID="e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e" exitCode=0 Dec 06 08:25:06 crc kubenswrapper[4763]: I1206 08:25:06.192005 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-g72tt" event={"ID":"9750f203-ba5a-4a59-b70f-9a2793abe5c8","Type":"ContainerDied","Data":"e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e"} Dec 06 08:25:06 crc kubenswrapper[4763]: I1206 08:25:06.192056 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-g72tt" event={"ID":"9750f203-ba5a-4a59-b70f-9a2793abe5c8","Type":"ContainerDied","Data":"4f25a9022c041149e1fe95dad47da74e5deaf405ad55867e6b6905aebaa9bb9e"} Dec 06 08:25:06 crc kubenswrapper[4763]: I1206 08:25:06.192080 4763 scope.go:117] "RemoveContainer" containerID="e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e" Dec 06 08:25:06 crc kubenswrapper[4763]: I1206 08:25:06.193177 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-g72tt" Dec 06 08:25:06 crc kubenswrapper[4763]: I1206 08:25:06.216881 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-g72tt"] Dec 06 08:25:06 crc kubenswrapper[4763]: I1206 08:25:06.222104 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-g72tt"] Dec 06 08:25:06 crc kubenswrapper[4763]: I1206 08:25:06.223107 4763 scope.go:117] "RemoveContainer" containerID="e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e" Dec 06 08:25:06 crc kubenswrapper[4763]: E1206 08:25:06.223667 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e\": container with ID starting with e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e not found: ID does not exist" containerID="e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e" Dec 06 08:25:06 crc kubenswrapper[4763]: I1206 08:25:06.223743 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e"} err="failed to get container status \"e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e\": rpc error: code = NotFound desc = could not find container \"e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e\": container with ID starting with e4fbbeff31ac78fcd74e918a0774abd8ec184ed0ab3c572723aa34f3964bd50e not found: ID does not exist" Dec 06 08:25:07 crc kubenswrapper[4763]: I1206 08:25:07.729297 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9750f203-ba5a-4a59-b70f-9a2793abe5c8" path="/var/lib/kubelet/pods/9750f203-ba5a-4a59-b70f-9a2793abe5c8/volumes" Dec 06 08:25:14 crc kubenswrapper[4763]: I1206 08:25:14.341427 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-8fkl9" Dec 06 08:25:14 crc kubenswrapper[4763]: I1206 08:25:14.342735 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-8fkl9" Dec 06 08:25:14 crc kubenswrapper[4763]: I1206 08:25:14.375693 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-8fkl9" Dec 06 08:25:15 crc kubenswrapper[4763]: I1206 08:25:15.284325 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-8fkl9" Dec 06 08:25:21 crc kubenswrapper[4763]: I1206 08:25:21.884229 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2"] Dec 06 08:25:21 crc kubenswrapper[4763]: E1206 08:25:21.885232 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9750f203-ba5a-4a59-b70f-9a2793abe5c8" containerName="registry-server" Dec 06 08:25:21 crc kubenswrapper[4763]: I1206 08:25:21.885248 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="9750f203-ba5a-4a59-b70f-9a2793abe5c8" containerName="registry-server" Dec 06 08:25:21 crc kubenswrapper[4763]: I1206 08:25:21.885391 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="9750f203-ba5a-4a59-b70f-9a2793abe5c8" containerName="registry-server" Dec 06 08:25:21 crc kubenswrapper[4763]: I1206 
08:25:21.886217 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:21 crc kubenswrapper[4763]: I1206 08:25:21.888881 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-mgxk5" Dec 06 08:25:21 crc kubenswrapper[4763]: I1206 08:25:21.903675 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2"] Dec 06 08:25:21 crc kubenswrapper[4763]: I1206 08:25:21.992987 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkhwz\" (UniqueName: \"kubernetes.io/projected/91194b81-a09e-491c-b66f-7d8d7628065d-kube-api-access-jkhwz\") pod \"4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:21 crc kubenswrapper[4763]: I1206 08:25:21.993067 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-util\") pod \"4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:21 crc kubenswrapper[4763]: I1206 08:25:21.993334 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-bundle\") pod \"4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:22 crc kubenswrapper[4763]: I1206 08:25:22.094813 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkhwz\" (UniqueName: \"kubernetes.io/projected/91194b81-a09e-491c-b66f-7d8d7628065d-kube-api-access-jkhwz\") pod \"4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:22 crc kubenswrapper[4763]: I1206 08:25:22.094880 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-util\") pod \"4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:22 crc kubenswrapper[4763]: I1206 08:25:22.095037 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-bundle\") pod \"4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:22 crc kubenswrapper[4763]: I1206 08:25:22.095415 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-util\") pod \"4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:22 crc kubenswrapper[4763]: I1206 08:25:22.095565 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-bundle\") pod \"4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:22 crc kubenswrapper[4763]: I1206 08:25:22.115403 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkhwz\" (UniqueName: \"kubernetes.io/projected/91194b81-a09e-491c-b66f-7d8d7628065d-kube-api-access-jkhwz\") pod \"4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:22 crc kubenswrapper[4763]: I1206 08:25:22.256226 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:22 crc kubenswrapper[4763]: I1206 08:25:22.652826 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2"] Dec 06 08:25:23 crc kubenswrapper[4763]: I1206 08:25:23.321130 4763 generic.go:334] "Generic (PLEG): container finished" podID="91194b81-a09e-491c-b66f-7d8d7628065d" containerID="8928d4349b3876c4396866a972455ace84a222fa9cd519c3d3dfb3492b4f00c3" exitCode=0 Dec 06 08:25:23 crc kubenswrapper[4763]: I1206 08:25:23.321181 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" event={"ID":"91194b81-a09e-491c-b66f-7d8d7628065d","Type":"ContainerDied","Data":"8928d4349b3876c4396866a972455ace84a222fa9cd519c3d3dfb3492b4f00c3"} Dec 06 08:25:23 crc kubenswrapper[4763]: I1206 08:25:23.321205 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" event={"ID":"91194b81-a09e-491c-b66f-7d8d7628065d","Type":"ContainerStarted","Data":"2ad689823ff491da813b5369f955496c38ee9c86994f56587abb43487bfad0a8"} Dec 06 08:25:24 crc kubenswrapper[4763]: I1206 08:25:24.327297 4763 generic.go:334] "Generic (PLEG): container finished" podID="91194b81-a09e-491c-b66f-7d8d7628065d" containerID="2c9c4e3ef32e5e314896a58c715f9653a96bbf409c6a070c796f2f0ee1f6421b" exitCode=0 Dec 06 08:25:24 crc kubenswrapper[4763]: I1206 08:25:24.327374 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" event={"ID":"91194b81-a09e-491c-b66f-7d8d7628065d","Type":"ContainerDied","Data":"2c9c4e3ef32e5e314896a58c715f9653a96bbf409c6a070c796f2f0ee1f6421b"} Dec 06 08:25:25 crc kubenswrapper[4763]: I1206 08:25:25.334135 4763 generic.go:334] "Generic (PLEG): container finished" podID="91194b81-a09e-491c-b66f-7d8d7628065d" containerID="377a92d6dc524feb866a8c671ffe3c6db686c56fef456a9aa1a4897048ee6dd7" exitCode=0 Dec 06 08:25:25 crc kubenswrapper[4763]: I1206 08:25:25.334331 4763 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" event={"ID":"91194b81-a09e-491c-b66f-7d8d7628065d","Type":"ContainerDied","Data":"377a92d6dc524feb866a8c671ffe3c6db686c56fef456a9aa1a4897048ee6dd7"} Dec 06 08:25:26 crc kubenswrapper[4763]: I1206 08:25:26.589354 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:26 crc kubenswrapper[4763]: I1206 08:25:26.760474 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-util\") pod \"91194b81-a09e-491c-b66f-7d8d7628065d\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " Dec 06 08:25:26 crc kubenswrapper[4763]: I1206 08:25:26.760562 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-bundle\") pod \"91194b81-a09e-491c-b66f-7d8d7628065d\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " Dec 06 08:25:26 crc kubenswrapper[4763]: I1206 08:25:26.760624 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkhwz\" (UniqueName: \"kubernetes.io/projected/91194b81-a09e-491c-b66f-7d8d7628065d-kube-api-access-jkhwz\") pod \"91194b81-a09e-491c-b66f-7d8d7628065d\" (UID: \"91194b81-a09e-491c-b66f-7d8d7628065d\") " Dec 06 08:25:26 crc kubenswrapper[4763]: I1206 08:25:26.761653 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-bundle" (OuterVolumeSpecName: "bundle") pod "91194b81-a09e-491c-b66f-7d8d7628065d" (UID: "91194b81-a09e-491c-b66f-7d8d7628065d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:25:26 crc kubenswrapper[4763]: I1206 08:25:26.769153 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91194b81-a09e-491c-b66f-7d8d7628065d-kube-api-access-jkhwz" (OuterVolumeSpecName: "kube-api-access-jkhwz") pod "91194b81-a09e-491c-b66f-7d8d7628065d" (UID: "91194b81-a09e-491c-b66f-7d8d7628065d"). InnerVolumeSpecName "kube-api-access-jkhwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:25:26 crc kubenswrapper[4763]: I1206 08:25:26.782007 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-util" (OuterVolumeSpecName: "util") pod "91194b81-a09e-491c-b66f-7d8d7628065d" (UID: "91194b81-a09e-491c-b66f-7d8d7628065d"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:25:26 crc kubenswrapper[4763]: I1206 08:25:26.861984 4763 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-util\") on node \"crc\" DevicePath \"\"" Dec 06 08:25:26 crc kubenswrapper[4763]: I1206 08:25:26.862023 4763 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/91194b81-a09e-491c-b66f-7d8d7628065d-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:25:26 crc kubenswrapper[4763]: I1206 08:25:26.862035 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkhwz\" (UniqueName: \"kubernetes.io/projected/91194b81-a09e-491c-b66f-7d8d7628065d-kube-api-access-jkhwz\") on node \"crc\" DevicePath \"\"" Dec 06 08:25:27 crc kubenswrapper[4763]: I1206 08:25:27.349348 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" event={"ID":"91194b81-a09e-491c-b66f-7d8d7628065d","Type":"ContainerDied","Data":"2ad689823ff491da813b5369f955496c38ee9c86994f56587abb43487bfad0a8"} Dec 06 08:25:27 crc kubenswrapper[4763]: I1206 08:25:27.349394 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ad689823ff491da813b5369f955496c38ee9c86994f56587abb43487bfad0a8" Dec 06 08:25:27 crc kubenswrapper[4763]: I1206 08:25:27.349415 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2" Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.173859 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6"] Dec 06 08:25:29 crc kubenswrapper[4763]: E1206 08:25:29.176023 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91194b81-a09e-491c-b66f-7d8d7628065d" containerName="util" Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.176047 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="91194b81-a09e-491c-b66f-7d8d7628065d" containerName="util" Dec 06 08:25:29 crc kubenswrapper[4763]: E1206 08:25:29.176074 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91194b81-a09e-491c-b66f-7d8d7628065d" containerName="pull" Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.176083 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="91194b81-a09e-491c-b66f-7d8d7628065d" containerName="pull" Dec 06 08:25:29 crc kubenswrapper[4763]: E1206 08:25:29.176110 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91194b81-a09e-491c-b66f-7d8d7628065d" containerName="extract" Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.176119 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="91194b81-a09e-491c-b66f-7d8d7628065d" containerName="extract" Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.176382 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="91194b81-a09e-491c-b66f-7d8d7628065d" containerName="extract" Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.177697 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6" Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.180358 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-9hhq9" Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.208807 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6"] Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.294578 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cznxc\" (UniqueName: \"kubernetes.io/projected/94e8ee83-090b-4636-9953-50d9bf39b2b7-kube-api-access-cznxc\") pod \"openstack-operator-controller-operator-8557c89b5c-6tcs6\" (UID: \"94e8ee83-090b-4636-9953-50d9bf39b2b7\") " pod="openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6" Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.396347 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cznxc\" (UniqueName: \"kubernetes.io/projected/94e8ee83-090b-4636-9953-50d9bf39b2b7-kube-api-access-cznxc\") pod \"openstack-operator-controller-operator-8557c89b5c-6tcs6\" (UID: \"94e8ee83-090b-4636-9953-50d9bf39b2b7\") " pod="openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6" Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.415435 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cznxc\" (UniqueName: \"kubernetes.io/projected/94e8ee83-090b-4636-9953-50d9bf39b2b7-kube-api-access-cznxc\") pod \"openstack-operator-controller-operator-8557c89b5c-6tcs6\" (UID: \"94e8ee83-090b-4636-9953-50d9bf39b2b7\") " pod="openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6" Dec 06 08:25:29 crc kubenswrapper[4763]: I1206 08:25:29.514803 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6" Dec 06 08:25:30 crc kubenswrapper[4763]: I1206 08:25:30.043786 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6"] Dec 06 08:25:30 crc kubenswrapper[4763]: I1206 08:25:30.372668 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6" event={"ID":"94e8ee83-090b-4636-9953-50d9bf39b2b7","Type":"ContainerStarted","Data":"2001bd734c2e480ebe191a9e19986c3f9fe357e5ca30ba6e5ff6d76d13bc943f"} Dec 06 08:25:35 crc kubenswrapper[4763]: I1206 08:25:35.410592 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6" event={"ID":"94e8ee83-090b-4636-9953-50d9bf39b2b7","Type":"ContainerStarted","Data":"feb592e062b97274aeceee2fc9784475d3b567da3f0f8b205ae2e1cdce3aabef"} Dec 06 08:25:35 crc kubenswrapper[4763]: I1206 08:25:35.411107 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6" Dec 06 08:25:35 crc kubenswrapper[4763]: I1206 08:25:35.439046 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6" podStartSLOduration=1.5569411180000001 podStartE2EDuration="6.43902691s" podCreationTimestamp="2025-12-06 08:25:29 +0000 UTC" firstStartedPulling="2025-12-06 08:25:30.054243154 +0000 UTC m=+812.629948202" lastFinishedPulling="2025-12-06 08:25:34.936328966 +0000 UTC m=+817.512033994" observedRunningTime="2025-12-06 08:25:35.436068842 +0000 UTC m=+818.011773880" watchObservedRunningTime="2025-12-06 08:25:35.43902691 +0000 UTC m=+818.014731948" Dec 06 08:25:49 crc kubenswrapper[4763]: I1206 08:25:49.517677 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-8557c89b5c-6tcs6" Dec 06 08:26:08 crc kubenswrapper[4763]: I1206 08:26:08.954614 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4"] Dec 06 08:26:08 crc kubenswrapper[4763]: I1206 08:26:08.956161 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" Dec 06 08:26:08 crc kubenswrapper[4763]: I1206 08:26:08.958180 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-6nfww" Dec 06 08:26:08 crc kubenswrapper[4763]: I1206 08:26:08.969354 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt"] Dec 06 08:26:08 crc kubenswrapper[4763]: I1206 08:26:08.975797 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" Dec 06 08:26:08 crc kubenswrapper[4763]: I1206 08:26:08.983271 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-sw7t9" Dec 06 08:26:08 crc kubenswrapper[4763]: I1206 08:26:08.988270 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.034822 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcsmj\" (UniqueName: \"kubernetes.io/projected/bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5-kube-api-access-hcsmj\") pod \"cinder-operator-controller-manager-859b6ccc6-t2vdt\" (UID: \"bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.034906 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8p2r\" (UniqueName: \"kubernetes.io/projected/ff46659d-6be0-4f7b-81a8-f8de0b6331ae-kube-api-access-j8p2r\") pod \"barbican-operator-controller-manager-7d9dfd778-s7cj4\" (UID: \"ff46659d-6be0-4f7b-81a8-f8de0b6331ae\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.040681 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.041929 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.047535 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-zs9zz" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.052809 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.074116 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.085128 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.086312 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.094662 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-jbx4w" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.121129 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.128710 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.129752 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.139382 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8p2r\" (UniqueName: \"kubernetes.io/projected/ff46659d-6be0-4f7b-81a8-f8de0b6331ae-kube-api-access-j8p2r\") pod \"barbican-operator-controller-manager-7d9dfd778-s7cj4\" (UID: \"ff46659d-6be0-4f7b-81a8-f8de0b6331ae\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.139465 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9zdf\" (UniqueName: \"kubernetes.io/projected/88e79272-2e99-462a-b29c-b4d2a34ed95b-kube-api-access-h9zdf\") pod \"heat-operator-controller-manager-5f64f6f8bb-jjs5h\" (UID: \"88e79272-2e99-462a-b29c-b4d2a34ed95b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.139520 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcsmj\" (UniqueName: \"kubernetes.io/projected/bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5-kube-api-access-hcsmj\") pod \"cinder-operator-controller-manager-859b6ccc6-t2vdt\" (UID: \"bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.144243 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-b7vbs" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.167042 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.168053 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.171087 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.175455 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.179531 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcsmj\" (UniqueName: \"kubernetes.io/projected/bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5-kube-api-access-hcsmj\") pod \"cinder-operator-controller-manager-859b6ccc6-t2vdt\" (UID: \"bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.182408 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8p2r\" (UniqueName: \"kubernetes.io/projected/ff46659d-6be0-4f7b-81a8-f8de0b6331ae-kube-api-access-j8p2r\") pod \"barbican-operator-controller-manager-7d9dfd778-s7cj4\" (UID: \"ff46659d-6be0-4f7b-81a8-f8de0b6331ae\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.185371 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.186675 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.190244 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.192394 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.195344 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-fq9nj" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.195648 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-f4ps6" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.195673 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-8pccl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.204443 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.241511 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9zdf\" (UniqueName: \"kubernetes.io/projected/88e79272-2e99-462a-b29c-b4d2a34ed95b-kube-api-access-h9zdf\") pod \"heat-operator-controller-manager-5f64f6f8bb-jjs5h\" (UID: \"88e79272-2e99-462a-b29c-b4d2a34ed95b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.241582 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxlk5\" (UniqueName: \"kubernetes.io/projected/5674dbca-4697-4993-888b-680428fba7ba-kube-api-access-wxlk5\") pod \"designate-operator-controller-manager-78b4bc895b-ngktl\" (UID: \"5674dbca-4697-4993-888b-680428fba7ba\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.241626 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np798\" (UniqueName: \"kubernetes.io/projected/cd89c4f2-cf50-4183-b364-d4886b5369a6-kube-api-access-np798\") pod \"glance-operator-controller-manager-77987cd8cd-h9ss4\" (UID: \"cd89c4f2-cf50-4183-b364-d4886b5369a6\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.263363 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.287720 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.288051 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.296669 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9zdf\" (UniqueName: \"kubernetes.io/projected/88e79272-2e99-462a-b29c-b4d2a34ed95b-kube-api-access-h9zdf\") pod \"heat-operator-controller-manager-5f64f6f8bb-jjs5h\" (UID: \"88e79272-2e99-462a-b29c-b4d2a34ed95b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.298519 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.299957 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.317551 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.322130 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-bz9jf" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.322753 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.346673 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv5qk\" (UniqueName: \"kubernetes.io/projected/3e707b64-79d0-4401-9401-a80ed24a9658-kube-api-access-cv5qk\") pod \"infra-operator-controller-manager-78d48bff9d-kj44v\" (UID: \"3e707b64-79d0-4401-9401-a80ed24a9658\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.346748 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert\") pod \"infra-operator-controller-manager-78d48bff9d-kj44v\" (UID: \"3e707b64-79d0-4401-9401-a80ed24a9658\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.346809 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxlk5\" (UniqueName: \"kubernetes.io/projected/5674dbca-4697-4993-888b-680428fba7ba-kube-api-access-wxlk5\") pod \"designate-operator-controller-manager-78b4bc895b-ngktl\" (UID: \"5674dbca-4697-4993-888b-680428fba7ba\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.346889 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xls9p\" (UniqueName: \"kubernetes.io/projected/7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b-kube-api-access-xls9p\") pod \"ironic-operator-controller-manager-6c548fd776-bvdzz\" (UID: \"7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.346968 4763 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-np798\" (UniqueName: \"kubernetes.io/projected/cd89c4f2-cf50-4183-b364-d4886b5369a6-kube-api-access-np798\") pod \"glance-operator-controller-manager-77987cd8cd-h9ss4\" (UID: \"cd89c4f2-cf50-4183-b364-d4886b5369a6\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.347100 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhq47\" (UniqueName: \"kubernetes.io/projected/3d914ebd-1d7f-405f-aa0c-c8b254ec7196-kube-api-access-nhq47\") pod \"horizon-operator-controller-manager-68c6d99b8f-dbttk\" (UID: \"3d914ebd-1d7f-405f-aa0c-c8b254ec7196\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.351743 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.355062 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.362022 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-qtt28" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.365939 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.367056 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.380281 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-5g68z" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.380460 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.387322 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.397544 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxlk5\" (UniqueName: \"kubernetes.io/projected/5674dbca-4697-4993-888b-680428fba7ba-kube-api-access-wxlk5\") pod \"designate-operator-controller-manager-78b4bc895b-ngktl\" (UID: \"5674dbca-4697-4993-888b-680428fba7ba\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.416222 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.418272 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.421544 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.423644 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np798\" (UniqueName: \"kubernetes.io/projected/cd89c4f2-cf50-4183-b364-d4886b5369a6-kube-api-access-np798\") pod \"glance-operator-controller-manager-77987cd8cd-h9ss4\" (UID: \"cd89c4f2-cf50-4183-b364-d4886b5369a6\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.424094 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.437202 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-gmpv4" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.437352 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-pg7lv" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.447832 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.448782 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhq47\" (UniqueName: \"kubernetes.io/projected/3d914ebd-1d7f-405f-aa0c-c8b254ec7196-kube-api-access-nhq47\") pod \"horizon-operator-controller-manager-68c6d99b8f-dbttk\" (UID: \"3d914ebd-1d7f-405f-aa0c-c8b254ec7196\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.448815 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjcd7\" (UniqueName: \"kubernetes.io/projected/2e2c64e0-cee9-47bd-afca-2fadeeb61b01-kube-api-access-tjcd7\") pod \"keystone-operator-controller-manager-7765d96ddf-nrmpg\" (UID: \"2e2c64e0-cee9-47bd-afca-2fadeeb61b01\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.448847 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv5qk\" (UniqueName: \"kubernetes.io/projected/3e707b64-79d0-4401-9401-a80ed24a9658-kube-api-access-cv5qk\") pod \"infra-operator-controller-manager-78d48bff9d-kj44v\" (UID: \"3e707b64-79d0-4401-9401-a80ed24a9658\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.448872 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert\") pod \"infra-operator-controller-manager-78d48bff9d-kj44v\" (UID: \"3e707b64-79d0-4401-9401-a80ed24a9658\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.448926 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-xls9p\" (UniqueName: \"kubernetes.io/projected/7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b-kube-api-access-xls9p\") pod \"ironic-operator-controller-manager-6c548fd776-bvdzz\" (UID: \"7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" Dec 06 08:26:09 crc kubenswrapper[4763]: E1206 08:26:09.449470 4763 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 06 08:26:09 crc kubenswrapper[4763]: E1206 08:26:09.449509 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert podName:3e707b64-79d0-4401-9401-a80ed24a9658 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:09.949492685 +0000 UTC m=+852.525197723 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert") pod "infra-operator-controller-manager-78d48bff9d-kj44v" (UID: "3e707b64-79d0-4401-9401-a80ed24a9658") : secret "infra-operator-webhook-server-cert" not found Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.543944 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv5qk\" (UniqueName: \"kubernetes.io/projected/3e707b64-79d0-4401-9401-a80ed24a9658-kube-api-access-cv5qk\") pod \"infra-operator-controller-manager-78d48bff9d-kj44v\" (UID: \"3e707b64-79d0-4401-9401-a80ed24a9658\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.575632 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.596030 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pm8c\" (UniqueName: \"kubernetes.io/projected/3d7a6c13-0b20-44ac-afb5-6d67630877eb-kube-api-access-7pm8c\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-j4fds\" (UID: \"3d7a6c13-0b20-44ac-afb5-6d67630877eb\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.596111 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjcd7\" (UniqueName: \"kubernetes.io/projected/2e2c64e0-cee9-47bd-afca-2fadeeb61b01-kube-api-access-tjcd7\") pod \"keystone-operator-controller-manager-7765d96ddf-nrmpg\" (UID: \"2e2c64e0-cee9-47bd-afca-2fadeeb61b01\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.596165 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dd4d\" (UniqueName: \"kubernetes.io/projected/1cff2610-ab42-4f8d-8e4a-22218c0f30e0-kube-api-access-7dd4d\") pod \"manila-operator-controller-manager-7c79b5df47-svncl\" (UID: \"1cff2610-ab42-4f8d-8e4a-22218c0f30e0\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.596189 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vz87\" (UniqueName: \"kubernetes.io/projected/ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83-kube-api-access-7vz87\") pod 
\"nova-operator-controller-manager-697bc559fc-78qkv\" (UID: \"ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.599375 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.619454 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.631347 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.631507 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz2kc\" (UniqueName: \"kubernetes.io/projected/a6e1401e-85a2-4477-96d2-58acbc583139-kube-api-access-fz2kc\") pod \"mariadb-operator-controller-manager-56bbcc9d85-rb7dw\" (UID: \"a6e1401e-85a2-4477-96d2-58acbc583139\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.636765 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xls9p\" (UniqueName: \"kubernetes.io/projected/7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b-kube-api-access-xls9p\") pod \"ironic-operator-controller-manager-6c548fd776-bvdzz\" (UID: \"7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.706021 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhq47\" (UniqueName: \"kubernetes.io/projected/3d914ebd-1d7f-405f-aa0c-c8b254ec7196-kube-api-access-nhq47\") pod \"horizon-operator-controller-manager-68c6d99b8f-dbttk\" (UID: \"3d914ebd-1d7f-405f-aa0c-c8b254ec7196\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.726652 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjcd7\" (UniqueName: \"kubernetes.io/projected/2e2c64e0-cee9-47bd-afca-2fadeeb61b01-kube-api-access-tjcd7\") pod \"keystone-operator-controller-manager-7765d96ddf-nrmpg\" (UID: \"2e2c64e0-cee9-47bd-afca-2fadeeb61b01\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.733859 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.734921 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz2kc\" (UniqueName: \"kubernetes.io/projected/a6e1401e-85a2-4477-96d2-58acbc583139-kube-api-access-fz2kc\") pod \"mariadb-operator-controller-manager-56bbcc9d85-rb7dw\" (UID: \"a6e1401e-85a2-4477-96d2-58acbc583139\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.734963 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pm8c\" (UniqueName: \"kubernetes.io/projected/3d7a6c13-0b20-44ac-afb5-6d67630877eb-kube-api-access-7pm8c\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-j4fds\" (UID: \"3d7a6c13-0b20-44ac-afb5-6d67630877eb\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.734995 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dd4d\" (UniqueName: \"kubernetes.io/projected/1cff2610-ab42-4f8d-8e4a-22218c0f30e0-kube-api-access-7dd4d\") pod \"manila-operator-controller-manager-7c79b5df47-svncl\" (UID: \"1cff2610-ab42-4f8d-8e4a-22218c0f30e0\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.735011 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vz87\" (UniqueName: \"kubernetes.io/projected/ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83-kube-api-access-7vz87\") pod \"nova-operator-controller-manager-697bc559fc-78qkv\" (UID: \"ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.758633 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz2kc\" (UniqueName: \"kubernetes.io/projected/a6e1401e-85a2-4477-96d2-58acbc583139-kube-api-access-fz2kc\") pod \"mariadb-operator-controller-manager-56bbcc9d85-rb7dw\" (UID: \"a6e1401e-85a2-4477-96d2-58acbc583139\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.760568 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vz87\" (UniqueName: \"kubernetes.io/projected/ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83-kube-api-access-7vz87\") pod \"nova-operator-controller-manager-697bc559fc-78qkv\" (UID: \"ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.770362 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dd4d\" (UniqueName: \"kubernetes.io/projected/1cff2610-ab42-4f8d-8e4a-22218c0f30e0-kube-api-access-7dd4d\") pod \"manila-operator-controller-manager-7c79b5df47-svncl\" (UID: \"1cff2610-ab42-4f8d-8e4a-22218c0f30e0\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.773856 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pm8c\" (UniqueName: \"kubernetes.io/projected/3d7a6c13-0b20-44ac-afb5-6d67630877eb-kube-api-access-7pm8c\") 
pod \"neutron-operator-controller-manager-5fdfd5b6b5-j4fds\" (UID: \"3d7a6c13-0b20-44ac-afb5-6d67630877eb\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.798853 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.802028 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.841322 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.849414 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-h47jh"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.856098 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-h47jh"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.856132 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.856575 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.857094 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.857132 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.858179 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.858195 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-fkptp"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.858884 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-fkptp"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.858909 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.860098 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.860357 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.860570 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.862159 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-9s4tc" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.864914 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-szdk6" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.865048 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-pgg59" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.865182 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-fsktx" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.872436 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.874133 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.880831 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-2kjht" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.882318 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.890486 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.897664 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.898819 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.911022 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-zhxph"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.912870 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-8nqxl" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.912979 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.914882 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-7555k" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.918271 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.923062 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.923183 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.926876 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.931147 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-bh6n5" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.939626 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqrhd\" (UniqueName: \"kubernetes.io/projected/c245a4f1-0cf3-4627-ad45-ce24db12fc93-kube-api-access-qqrhd\") pod \"ovn-operator-controller-manager-b6456fdb6-dcrcf\" (UID: \"c245a4f1-0cf3-4627-ad45-ce24db12fc93\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.939692 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5gtw\" (UniqueName: \"kubernetes.io/projected/1aa32609-8006-42dd-94d3-0340547ed370-kube-api-access-f5gtw\") pod \"swift-operator-controller-manager-5f8c65bbfc-8jwdv\" (UID: \"1aa32609-8006-42dd-94d3-0340547ed370\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.939721 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l57sw\" (UniqueName: \"kubernetes.io/projected/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-kube-api-access-l57sw\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp\" (UID: \"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.939745 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z85s\" (UniqueName: \"kubernetes.io/projected/e4139d53-17de-4e12-a43a-3f571154e203-kube-api-access-5z85s\") pod \"telemetry-operator-controller-manager-76cc84c6bb-vzhvh\" (UID: \"e4139d53-17de-4e12-a43a-3f571154e203\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.939782 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jsll\" (UniqueName: 
\"kubernetes.io/projected/43a9b006-b703-46ad-a74b-f00752e25fdc-kube-api-access-5jsll\") pod \"placement-operator-controller-manager-78f8948974-fkptp\" (UID: \"43a9b006-b703-46ad-a74b-f00752e25fdc\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.939802 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp\" (UID: \"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.939818 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtkhs\" (UniqueName: \"kubernetes.io/projected/e91554af-5d2b-4477-be5e-314a9b6e901d-kube-api-access-xtkhs\") pod \"octavia-operator-controller-manager-998648c74-h47jh\" (UID: \"e91554af-5d2b-4477-be5e-314a9b6e901d\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.940860 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.948027 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7"] Dec 06 08:26:09 crc kubenswrapper[4763]: I1206 08:26:09.979420 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-zhxph"] Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.042247 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert\") pod \"infra-operator-controller-manager-78d48bff9d-kj44v\" (UID: \"3e707b64-79d0-4401-9401-a80ed24a9658\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.048419 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqrhd\" (UniqueName: \"kubernetes.io/projected/c245a4f1-0cf3-4627-ad45-ce24db12fc93-kube-api-access-qqrhd\") pod \"ovn-operator-controller-manager-b6456fdb6-dcrcf\" (UID: \"c245a4f1-0cf3-4627-ad45-ce24db12fc93\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.048459 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8dnr\" (UniqueName: \"kubernetes.io/projected/fc8f06be-5292-423d-bba6-e50068054197-kube-api-access-r8dnr\") pod \"watcher-operator-controller-manager-7d48f48f5f-4rgh7\" (UID: \"fc8f06be-5292-423d-bba6-e50068054197\") " pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.048532 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5gtw\" (UniqueName: \"kubernetes.io/projected/1aa32609-8006-42dd-94d3-0340547ed370-kube-api-access-f5gtw\") pod \"swift-operator-controller-manager-5f8c65bbfc-8jwdv\" (UID: 
\"1aa32609-8006-42dd-94d3-0340547ed370\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.048568 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l57sw\" (UniqueName: \"kubernetes.io/projected/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-kube-api-access-l57sw\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp\" (UID: \"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.048600 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjcnh\" (UniqueName: \"kubernetes.io/projected/94ab8de3-6887-460c-a3c0-d0cf4dcf4ead-kube-api-access-jjcnh\") pod \"test-operator-controller-manager-5854674fcc-zhxph\" (UID: \"94ab8de3-6887-460c-a3c0-d0cf4dcf4ead\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.048631 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z85s\" (UniqueName: \"kubernetes.io/projected/e4139d53-17de-4e12-a43a-3f571154e203-kube-api-access-5z85s\") pod \"telemetry-operator-controller-manager-76cc84c6bb-vzhvh\" (UID: \"e4139d53-17de-4e12-a43a-3f571154e203\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.048654 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jsll\" (UniqueName: \"kubernetes.io/projected/43a9b006-b703-46ad-a74b-f00752e25fdc-kube-api-access-5jsll\") pod \"placement-operator-controller-manager-78f8948974-fkptp\" (UID: \"43a9b006-b703-46ad-a74b-f00752e25fdc\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.048688 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtkhs\" (UniqueName: \"kubernetes.io/projected/e91554af-5d2b-4477-be5e-314a9b6e901d-kube-api-access-xtkhs\") pod \"octavia-operator-controller-manager-998648c74-h47jh\" (UID: \"e91554af-5d2b-4477-be5e-314a9b6e901d\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.048706 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp\" (UID: \"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.042891 4763 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.048877 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert podName:3e707b64-79d0-4401-9401-a80ed24a9658 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:11.048860623 +0000 UTC m=+853.624565661 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert") pod "infra-operator-controller-manager-78d48bff9d-kj44v" (UID: "3e707b64-79d0-4401-9401-a80ed24a9658") : secret "infra-operator-webhook-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.048827 4763 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.049493 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert podName:f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c nodeName:}" failed. No retries permitted until 2025-12-06 08:26:10.549467279 +0000 UTC m=+853.125172317 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" (UID: "f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.052436 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4"] Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.053456 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.062786 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.062967 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.063282 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-trzwl" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.080097 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4"] Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.080706 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtkhs\" (UniqueName: \"kubernetes.io/projected/e91554af-5d2b-4477-be5e-314a9b6e901d-kube-api-access-xtkhs\") pod \"octavia-operator-controller-manager-998648c74-h47jh\" (UID: \"e91554af-5d2b-4477-be5e-314a9b6e901d\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.081106 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jsll\" (UniqueName: \"kubernetes.io/projected/43a9b006-b703-46ad-a74b-f00752e25fdc-kube-api-access-5jsll\") pod \"placement-operator-controller-manager-78f8948974-fkptp\" (UID: \"43a9b006-b703-46ad-a74b-f00752e25fdc\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.083369 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqrhd\" (UniqueName: 
\"kubernetes.io/projected/c245a4f1-0cf3-4627-ad45-ce24db12fc93-kube-api-access-qqrhd\") pod \"ovn-operator-controller-manager-b6456fdb6-dcrcf\" (UID: \"c245a4f1-0cf3-4627-ad45-ce24db12fc93\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.091421 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5gtw\" (UniqueName: \"kubernetes.io/projected/1aa32609-8006-42dd-94d3-0340547ed370-kube-api-access-f5gtw\") pod \"swift-operator-controller-manager-5f8c65bbfc-8jwdv\" (UID: \"1aa32609-8006-42dd-94d3-0340547ed370\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.092633 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z85s\" (UniqueName: \"kubernetes.io/projected/e4139d53-17de-4e12-a43a-3f571154e203-kube-api-access-5z85s\") pod \"telemetry-operator-controller-manager-76cc84c6bb-vzhvh\" (UID: \"e4139d53-17de-4e12-a43a-3f571154e203\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.103338 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64"] Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.104504 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.118698 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l57sw\" (UniqueName: \"kubernetes.io/projected/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-kube-api-access-l57sw\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp\" (UID: \"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.140883 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-snndv" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.150541 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.150609 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8dnr\" (UniqueName: \"kubernetes.io/projected/fc8f06be-5292-423d-bba6-e50068054197-kube-api-access-r8dnr\") pod \"watcher-operator-controller-manager-7d48f48f5f-4rgh7\" (UID: \"fc8f06be-5292-423d-bba6-e50068054197\") " pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.150674 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjcnh\" (UniqueName: \"kubernetes.io/projected/94ab8de3-6887-460c-a3c0-d0cf4dcf4ead-kube-api-access-jjcnh\") pod \"test-operator-controller-manager-5854674fcc-zhxph\" (UID: 
\"94ab8de3-6887-460c-a3c0-d0cf4dcf4ead\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.150816 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmcd8\" (UniqueName: \"kubernetes.io/projected/c81486b2-5d29-4032-9db8-8f8266846f74-kube-api-access-mmcd8\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.150885 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm9xh\" (UniqueName: \"kubernetes.io/projected/b9daf37b-2ddd-4324-98d5-ab782c45de9a-kube-api-access-jm9xh\") pod \"rabbitmq-cluster-operator-manager-668c99d594-dsc64\" (UID: \"b9daf37b-2ddd-4324-98d5-ab782c45de9a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.150934 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.157787 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.166756 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64"] Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.203427 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjcnh\" (UniqueName: \"kubernetes.io/projected/94ab8de3-6887-460c-a3c0-d0cf4dcf4ead-kube-api-access-jjcnh\") pod \"test-operator-controller-manager-5854674fcc-zhxph\" (UID: \"94ab8de3-6887-460c-a3c0-d0cf4dcf4ead\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.207950 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8dnr\" (UniqueName: \"kubernetes.io/projected/fc8f06be-5292-423d-bba6-e50068054197-kube-api-access-r8dnr\") pod \"watcher-operator-controller-manager-7d48f48f5f-4rgh7\" (UID: \"fc8f06be-5292-423d-bba6-e50068054197\") " pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.245816 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.256296 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.256450 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmcd8\" (UniqueName: \"kubernetes.io/projected/c81486b2-5d29-4032-9db8-8f8266846f74-kube-api-access-mmcd8\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.256534 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm9xh\" (UniqueName: \"kubernetes.io/projected/b9daf37b-2ddd-4324-98d5-ab782c45de9a-kube-api-access-jm9xh\") pod \"rabbitmq-cluster-operator-manager-668c99d594-dsc64\" (UID: \"b9daf37b-2ddd-4324-98d5-ab782c45de9a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.256651 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.256865 4763 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.256942 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:10.756923574 +0000 UTC m=+853.332628612 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "webhook-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.257594 4763 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.257878 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:10.75785434 +0000 UTC m=+853.333559378 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "metrics-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.296036 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4"] Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.299557 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm9xh\" (UniqueName: \"kubernetes.io/projected/b9daf37b-2ddd-4324-98d5-ab782c45de9a-kube-api-access-jm9xh\") pod \"rabbitmq-cluster-operator-manager-668c99d594-dsc64\" (UID: \"b9daf37b-2ddd-4324-98d5-ab782c45de9a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.299931 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.308176 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmcd8\" (UniqueName: \"kubernetes.io/projected/c81486b2-5d29-4032-9db8-8f8266846f74-kube-api-access-mmcd8\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.344373 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.347859 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt"] Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.372187 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.372253 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.391759 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.421709 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.449321 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.470779 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4"] Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.566106 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp\" (UID: \"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.566307 4763 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.566581 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert podName:f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c nodeName:}" failed. No retries permitted until 2025-12-06 08:26:11.566559931 +0000 UTC m=+854.142264969 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" (UID: "f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.576841 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h"] Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.581815 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl"] Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.746727 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" event={"ID":"bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5","Type":"ContainerStarted","Data":"8b5961ba030224434d7a84946383acb4057ced24cb8a044080cc2093896838c2"} Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.748065 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" event={"ID":"ff46659d-6be0-4f7b-81a8-f8de0b6331ae","Type":"ContainerStarted","Data":"6efbec481d4e8924f72a3226421dcfae0e0d53eab985fa4d40db78363b544943"} Dec 06 08:26:10 crc kubenswrapper[4763]: W1206 08:26:10.760998 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88e79272_2e99_462a_b29c_b4d2a34ed95b.slice/crio-52a58333d546ac69025ad4014c34d1ad69d38bf5ef5bbce6c6b8518e5fb376bc WatchSource:0}: Error finding container 52a58333d546ac69025ad4014c34d1ad69d38bf5ef5bbce6c6b8518e5fb376bc: Status 404 returned error can't find the container with id 52a58333d546ac69025ad4014c34d1ad69d38bf5ef5bbce6c6b8518e5fb376bc Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.780403 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs\") pod 
\"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.780469 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.780624 4763 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.780670 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:11.780654405 +0000 UTC m=+854.356359443 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "metrics-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.781402 4763 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: E1206 08:26:10.781425 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:11.781418086 +0000 UTC m=+854.357123124 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "webhook-server-cert" not found Dec 06 08:26:10 crc kubenswrapper[4763]: I1206 08:26:10.970356 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg"] Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.106826 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert\") pod \"infra-operator-controller-manager-78d48bff9d-kj44v\" (UID: \"3e707b64-79d0-4401-9401-a80ed24a9658\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.107034 4763 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.107085 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert podName:3e707b64-79d0-4401-9401-a80ed24a9658 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:13.107071275 +0000 UTC m=+855.682776313 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert") pod "infra-operator-controller-manager-78d48bff9d-kj44v" (UID: "3e707b64-79d0-4401-9401-a80ed24a9658") : secret "infra-operator-webhook-server-cert" not found Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.123657 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds"] Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.163825 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk"] Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.174687 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz"] Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.247174 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw"] Dec 06 08:26:11 crc kubenswrapper[4763]: W1206 08:26:11.249917 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6e1401e_85a2_4477_96d2_58acbc583139.slice/crio-d5f69fcaec3e7f45d468ed94eb45ef91163a480d3328f45994625054a8eefb62 WatchSource:0}: Error finding container d5f69fcaec3e7f45d468ed94eb45ef91163a480d3328f45994625054a8eefb62: Status 404 returned error can't find the container with id d5f69fcaec3e7f45d468ed94eb45ef91163a480d3328f45994625054a8eefb62 Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.319587 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh"] Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.354007 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl"] Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.360538 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv"] Dec 06 08:26:11 crc kubenswrapper[4763]: W1206 08:26:11.416278 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce2bb7ad_0ccc_425c_b2a6_1718f8a5ac83.slice/crio-fb1974be4a497b2ee6a23c8ba7b3bab0bafc32b030b840186f56998334689646 WatchSource:0}: Error finding container fb1974be4a497b2ee6a23c8ba7b3bab0bafc32b030b840186f56998334689646: Status 404 returned error can't find the container with id fb1974be4a497b2ee6a23c8ba7b3bab0bafc32b030b840186f56998334689646 Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.475088 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf"] Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.482981 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-h47jh"] Dec 06 08:26:11 crc kubenswrapper[4763]: W1206 08:26:11.484601 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc245a4f1_0cf3_4627_ad45_ce24db12fc93.slice/crio-ec610c01cdd1a3a00e5b64d5e5fb50a3c9448606cd09371e246387ee6ce1f9a4 WatchSource:0}: Error finding container ec610c01cdd1a3a00e5b64d5e5fb50a3c9448606cd09371e246387ee6ce1f9a4: 
Status 404 returned error can't find the container with id ec610c01cdd1a3a00e5b64d5e5fb50a3c9448606cd09371e246387ee6ce1f9a4 Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.506332 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-fkptp"] Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.530969 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xtkhs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-h47jh_openstack-operators(e91554af-5d2b-4477-be5e-314a9b6e901d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.532736 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xtkhs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-h47jh_openstack-operators(e91554af-5d2b-4477-be5e-314a9b6e901d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.535584 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" podUID="e91554af-5d2b-4477-be5e-314a9b6e901d" Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.618245 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp\" (UID: \"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.618461 4763 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.618556 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert podName:f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c nodeName:}" failed. No retries permitted until 2025-12-06 08:26:13.618530655 +0000 UTC m=+856.194235693 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" (UID: "f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.678317 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7"] Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.680649 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64"] Dec 06 08:26:11 crc kubenswrapper[4763]: W1206 08:26:11.683524 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc8f06be_5292_423d_bba6_e50068054197.slice/crio-ca9bf9003aee3e161c4e34b39858bc0ce405c00d88f40fceebb385236d95363f WatchSource:0}: Error finding container ca9bf9003aee3e161c4e34b39858bc0ce405c00d88f40fceebb385236d95363f: Status 404 returned error can't find the container with id ca9bf9003aee3e161c4e34b39858bc0ce405c00d88f40fceebb385236d95363f Dec 06 08:26:11 crc kubenswrapper[4763]: W1206 08:26:11.686173 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9daf37b_2ddd_4324_98d5_ab782c45de9a.slice/crio-c1be854a27e4c3056738d95f38998a366671087f80a1fac3123b969eb5852a92 WatchSource:0}: Error finding container c1be854a27e4c3056738d95f38998a366671087f80a1fac3123b969eb5852a92: Status 404 returned error can't find the container with id c1be854a27e4c3056738d95f38998a366671087f80a1fac3123b969eb5852a92 Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.686642 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv"] Dec 06 08:26:11 crc kubenswrapper[4763]: W1206 08:26:11.688703 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1aa32609_8006_42dd_94d3_0340547ed370.slice/crio-efa11e69b7f058bfc88e60ff3d00e4f20acc405e2aff555d0ad7362af9e43eee WatchSource:0}: Error finding container efa11e69b7f058bfc88e60ff3d00e4f20acc405e2aff555d0ad7362af9e43eee: Status 404 returned error can't find the container with id efa11e69b7f058bfc88e60ff3d00e4f20acc405e2aff555d0ad7362af9e43eee Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.691047 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-f5gtw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-8jwdv_openstack-operators(1aa32609-8006-42dd-94d3-0340547ed370): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.699505 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jm9xh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-dsc64_openstack-operators(b9daf37b-2ddd-4324-98d5-ab782c45de9a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.699620 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-f5gtw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-8jwdv_openstack-operators(1aa32609-8006-42dd-94d3-0340547ed370): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.701619 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" podUID="1aa32609-8006-42dd-94d3-0340547ed370" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.701666 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64" podUID="b9daf37b-2ddd-4324-98d5-ab782c45de9a" Dec 
06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.717924 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-zhxph"] Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.733950 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jjcnh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-zhxph_openstack-operators(94ab8de3-6887-460c-a3c0-d0cf4dcf4ead): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.738317 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jjcnh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-zhxph_openstack-operators(94ab8de3-6887-460c-a3c0-d0cf4dcf4ead): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.740356 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" podUID="94ab8de3-6887-460c-a3c0-d0cf4dcf4ead" Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.763651 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" event={"ID":"94ab8de3-6887-460c-a3c0-d0cf4dcf4ead","Type":"ContainerStarted","Data":"d2bb2e7499c011c4cba5d19eaf402807274264e0cc2f7c5fb69a392b6dd014ef"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.766290 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" event={"ID":"43a9b006-b703-46ad-a74b-f00752e25fdc","Type":"ContainerStarted","Data":"716b860c534f380cdc23d412683271ecc0c80d6013c475aaf20bf8959d748b0c"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.768884 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" event={"ID":"cd89c4f2-cf50-4183-b364-d4886b5369a6","Type":"ContainerStarted","Data":"606de313359c1639d65c7924a67d3b925c6488bceee4c112b10319fa9b22473c"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.772536 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" event={"ID":"e91554af-5d2b-4477-be5e-314a9b6e901d","Type":"ContainerStarted","Data":"e0c85dec3cef41c5455611476e4dedc3e0b7ee3da8029bc06287a70733df12b7"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.775067 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" event={"ID":"1cff2610-ab42-4f8d-8e4a-22218c0f30e0","Type":"ContainerStarted","Data":"4f2aeb5b497c81962f88478b6344b380daa8468977d20c6b7327e5ba9b56e335"} Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.777272 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" podUID="94ab8de3-6887-460c-a3c0-d0cf4dcf4ead" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.777501 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" podUID="e91554af-5d2b-4477-be5e-314a9b6e901d" Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.779108 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" event={"ID":"7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b","Type":"ContainerStarted","Data":"2f1ef53c8ccf1f6c806ef0d3e56f7ea2d6551f9b36d4748eb4b18b3c60a1f0cf"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.803778 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64" event={"ID":"b9daf37b-2ddd-4324-98d5-ab782c45de9a","Type":"ContainerStarted","Data":"c1be854a27e4c3056738d95f38998a366671087f80a1fac3123b969eb5852a92"} Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.805087 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64" podUID="b9daf37b-2ddd-4324-98d5-ab782c45de9a" Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.806286 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" event={"ID":"c245a4f1-0cf3-4627-ad45-ce24db12fc93","Type":"ContainerStarted","Data":"ec610c01cdd1a3a00e5b64d5e5fb50a3c9448606cd09371e246387ee6ce1f9a4"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.807710 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" event={"ID":"fc8f06be-5292-423d-bba6-e50068054197","Type":"ContainerStarted","Data":"ca9bf9003aee3e161c4e34b39858bc0ce405c00d88f40fceebb385236d95363f"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.809679 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" event={"ID":"2e2c64e0-cee9-47bd-afca-2fadeeb61b01","Type":"ContainerStarted","Data":"b2fa5caa39135eca087d13b21e8003cc3382f8af7070fb702c54ead95d7b2130"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.811669 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" 
event={"ID":"a6e1401e-85a2-4477-96d2-58acbc583139","Type":"ContainerStarted","Data":"d5f69fcaec3e7f45d468ed94eb45ef91163a480d3328f45994625054a8eefb62"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.815310 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" event={"ID":"e4139d53-17de-4e12-a43a-3f571154e203","Type":"ContainerStarted","Data":"33e256357b42e9f54cfe1ab9f321dac0c216dd18ec9670d153975f07be5cd51d"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.828964 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.829130 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.829258 4763 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.829306 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:13.829290239 +0000 UTC m=+856.404995277 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "webhook-server-cert" not found Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.829626 4763 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.829719 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:13.829696471 +0000 UTC m=+856.405401509 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "metrics-server-cert" not found Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.834490 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" event={"ID":"88e79272-2e99-462a-b29c-b4d2a34ed95b","Type":"ContainerStarted","Data":"52a58333d546ac69025ad4014c34d1ad69d38bf5ef5bbce6c6b8518e5fb376bc"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.841460 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" event={"ID":"5674dbca-4697-4993-888b-680428fba7ba","Type":"ContainerStarted","Data":"3efe1d5554a143b08824c144e66094d95ebd522cd98a25165c04a7becb4b649b"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.843405 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" event={"ID":"ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83","Type":"ContainerStarted","Data":"fb1974be4a497b2ee6a23c8ba7b3bab0bafc32b030b840186f56998334689646"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.845015 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" event={"ID":"3d914ebd-1d7f-405f-aa0c-c8b254ec7196","Type":"ContainerStarted","Data":"e541e3bfc55303df804f6efca26c64b6d427d1ab69f4dc47cd799bdea00a8716"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.848271 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" event={"ID":"3d7a6c13-0b20-44ac-afb5-6d67630877eb","Type":"ContainerStarted","Data":"d9de911d5d4f3c6c464048cdf454ac3efad4365b008a945db67d733d2d19eb14"} Dec 06 08:26:11 crc kubenswrapper[4763]: I1206 08:26:11.850653 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" event={"ID":"1aa32609-8006-42dd-94d3-0340547ed370","Type":"ContainerStarted","Data":"efa11e69b7f058bfc88e60ff3d00e4f20acc405e2aff555d0ad7362af9e43eee"} Dec 06 08:26:11 crc kubenswrapper[4763]: E1206 08:26:11.879238 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" podUID="1aa32609-8006-42dd-94d3-0340547ed370" Dec 06 08:26:12 crc kubenswrapper[4763]: E1206 08:26:12.894136 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64" podUID="b9daf37b-2ddd-4324-98d5-ab782c45de9a" Dec 06 08:26:12 crc kubenswrapper[4763]: E1206 
08:26:12.894631 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" podUID="1aa32609-8006-42dd-94d3-0340547ed370" Dec 06 08:26:12 crc kubenswrapper[4763]: E1206 08:26:12.901681 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" podUID="94ab8de3-6887-460c-a3c0-d0cf4dcf4ead" Dec 06 08:26:12 crc kubenswrapper[4763]: E1206 08:26:12.903134 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" podUID="e91554af-5d2b-4477-be5e-314a9b6e901d" Dec 06 08:26:13 crc kubenswrapper[4763]: I1206 08:26:13.169951 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert\") pod \"infra-operator-controller-manager-78d48bff9d-kj44v\" (UID: \"3e707b64-79d0-4401-9401-a80ed24a9658\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:13 crc kubenswrapper[4763]: E1206 08:26:13.170130 4763 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 06 08:26:13 crc kubenswrapper[4763]: E1206 08:26:13.170181 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert podName:3e707b64-79d0-4401-9401-a80ed24a9658 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:17.170168162 +0000 UTC m=+859.745873200 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert") pod "infra-operator-controller-manager-78d48bff9d-kj44v" (UID: "3e707b64-79d0-4401-9401-a80ed24a9658") : secret "infra-operator-webhook-server-cert" not found Dec 06 08:26:13 crc kubenswrapper[4763]: I1206 08:26:13.758839 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp\" (UID: \"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:13 crc kubenswrapper[4763]: E1206 08:26:13.759125 4763 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 06 08:26:13 crc kubenswrapper[4763]: E1206 08:26:13.759183 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert podName:f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c nodeName:}" failed. No retries permitted until 2025-12-06 08:26:17.7591653 +0000 UTC m=+860.334870338 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" (UID: "f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 06 08:26:13 crc kubenswrapper[4763]: I1206 08:26:13.860140 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:13 crc kubenswrapper[4763]: I1206 08:26:13.860228 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:13 crc kubenswrapper[4763]: E1206 08:26:13.860347 4763 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 06 08:26:13 crc kubenswrapper[4763]: E1206 08:26:13.860404 4763 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 06 08:26:13 crc kubenswrapper[4763]: E1206 08:26:13.860431 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:17.860410276 +0000 UTC m=+860.436115364 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "webhook-server-cert" not found Dec 06 08:26:13 crc kubenswrapper[4763]: E1206 08:26:13.860474 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:17.860455367 +0000 UTC m=+860.436160465 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "metrics-server-cert" not found Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.232793 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert\") pod \"infra-operator-controller-manager-78d48bff9d-kj44v\" (UID: \"3e707b64-79d0-4401-9401-a80ed24a9658\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:17 crc kubenswrapper[4763]: E1206 08:26:17.233441 4763 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 06 08:26:17 crc kubenswrapper[4763]: E1206 08:26:17.233498 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert podName:3e707b64-79d0-4401-9401-a80ed24a9658 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:25.233479133 +0000 UTC m=+867.809184171 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert") pod "infra-operator-controller-manager-78d48bff9d-kj44v" (UID: "3e707b64-79d0-4401-9401-a80ed24a9658") : secret "infra-operator-webhook-server-cert" not found Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.267472 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mzzdf"] Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.276379 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.285101 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mzzdf"] Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.439371 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-225jh\" (UniqueName: \"kubernetes.io/projected/e50325ec-2cee-48c6-966a-26a08a87806e-kube-api-access-225jh\") pod \"redhat-marketplace-mzzdf\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.439605 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-utilities\") pod \"redhat-marketplace-mzzdf\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.439647 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-catalog-content\") pod \"redhat-marketplace-mzzdf\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.541152 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-utilities\") pod \"redhat-marketplace-mzzdf\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.541207 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-catalog-content\") pod \"redhat-marketplace-mzzdf\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.541249 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-225jh\" (UniqueName: \"kubernetes.io/projected/e50325ec-2cee-48c6-966a-26a08a87806e-kube-api-access-225jh\") pod \"redhat-marketplace-mzzdf\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.541755 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-utilities\") pod \"redhat-marketplace-mzzdf\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.541824 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-catalog-content\") pod \"redhat-marketplace-mzzdf\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.576060 4763 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-225jh\" (UniqueName: \"kubernetes.io/projected/e50325ec-2cee-48c6-966a-26a08a87806e-kube-api-access-225jh\") pod \"redhat-marketplace-mzzdf\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.602809 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.844454 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp\" (UID: \"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:17 crc kubenswrapper[4763]: E1206 08:26:17.844659 4763 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 06 08:26:17 crc kubenswrapper[4763]: E1206 08:26:17.844695 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert podName:f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c nodeName:}" failed. No retries permitted until 2025-12-06 08:26:25.84468398 +0000 UTC m=+868.420389008 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" (UID: "f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.950358 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:17 crc kubenswrapper[4763]: I1206 08:26:17.950425 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:17 crc kubenswrapper[4763]: E1206 08:26:17.950546 4763 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 06 08:26:17 crc kubenswrapper[4763]: E1206 08:26:17.950595 4763 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 06 08:26:17 crc kubenswrapper[4763]: E1206 08:26:17.950645 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:25.950619101 +0000 UTC m=+868.526324209 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "webhook-server-cert" not found Dec 06 08:26:17 crc kubenswrapper[4763]: E1206 08:26:17.950668 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:25.950659632 +0000 UTC m=+868.526364770 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "metrics-server-cert" not found Dec 06 08:26:25 crc kubenswrapper[4763]: I1206 08:26:25.255122 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert\") pod \"infra-operator-controller-manager-78d48bff9d-kj44v\" (UID: \"3e707b64-79d0-4401-9401-a80ed24a9658\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:25 crc kubenswrapper[4763]: I1206 08:26:25.275482 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e707b64-79d0-4401-9401-a80ed24a9658-cert\") pod \"infra-operator-controller-manager-78d48bff9d-kj44v\" (UID: \"3e707b64-79d0-4401-9401-a80ed24a9658\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:25 crc kubenswrapper[4763]: I1206 08:26:25.482317 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:26:25 crc kubenswrapper[4763]: E1206 08:26:25.766761 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 06 08:26:25 crc kubenswrapper[4763]: E1206 08:26:25.766962 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nhq47,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-dbttk_openstack-operators(3d914ebd-1d7f-405f-aa0c-c8b254ec7196): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:25 crc kubenswrapper[4763]: I1206 08:26:25.864351 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp\" (UID: \"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:25 crc kubenswrapper[4763]: I1206 08:26:25.870536 4763 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp\" (UID: \"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:25 crc kubenswrapper[4763]: I1206 08:26:25.929526 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:26:25 crc kubenswrapper[4763]: I1206 08:26:25.966360 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:25 crc kubenswrapper[4763]: I1206 08:26:25.966442 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:25 crc kubenswrapper[4763]: E1206 08:26:25.966561 4763 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 06 08:26:25 crc kubenswrapper[4763]: E1206 08:26:25.966621 4763 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 06 08:26:25 crc kubenswrapper[4763]: E1206 08:26:25.966645 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:41.966624415 +0000 UTC m=+884.542329453 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "webhook-server-cert" not found Dec 06 08:26:25 crc kubenswrapper[4763]: E1206 08:26:25.966673 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs podName:c81486b2-5d29-4032-9db8-8f8266846f74 nodeName:}" failed. No retries permitted until 2025-12-06 08:26:41.966658916 +0000 UTC m=+884.542363954 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs") pod "openstack-operator-controller-manager-5b68f46455-24xn4" (UID: "c81486b2-5d29-4032-9db8-8f8266846f74") : secret "metrics-server-cert" not found Dec 06 08:26:26 crc kubenswrapper[4763]: E1206 08:26:26.838189 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385" Dec 06 08:26:26 crc kubenswrapper[4763]: E1206 08:26:26.838545 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5z85s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-vzhvh_openstack-operators(e4139d53-17de-4e12-a43a-3f571154e203): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:27 crc kubenswrapper[4763]: E1206 08:26:27.538756 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 06 08:26:27 
crc kubenswrapper[4763]: E1206 08:26:27.538974 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5jsll,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-fkptp_openstack-operators(43a9b006-b703-46ad-a74b-f00752e25fdc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:30 crc kubenswrapper[4763]: E1206 08:26:30.495747 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809" Dec 06 08:26:30 crc kubenswrapper[4763]: E1206 08:26:30.496138 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-np798,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-h9ss4_openstack-operators(cd89c4f2-cf50-4183-b364-d4886b5369a6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:31 crc kubenswrapper[4763]: E1206 08:26:31.135685 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 06 08:26:31 crc kubenswrapper[4763]: E1206 08:26:31.135882 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qqrhd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-dcrcf_openstack-operators(c245a4f1-0cf3-4627-ad45-ce24db12fc93): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:32 crc kubenswrapper[4763]: E1206 08:26:32.831544 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 06 08:26:32 crc kubenswrapper[4763]: E1206 08:26:32.832005 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tjcd7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-nrmpg_openstack-operators(2e2c64e0-cee9-47bd-afca-2fadeeb61b01): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:32 crc kubenswrapper[4763]: E1206 08:26:32.897856 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/openstack-k8s-operators/watcher-operator:c5db5f3745b299172c3afa08fe2fc016acfe1ccc" Dec 06 08:26:32 crc kubenswrapper[4763]: E1206 08:26:32.897944 4763 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/openstack-k8s-operators/watcher-operator:c5db5f3745b299172c3afa08fe2fc016acfe1ccc" Dec 06 08:26:32 crc kubenswrapper[4763]: E1206 08:26:32.898173 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.156:5001/openstack-k8s-operators/watcher-operator:c5db5f3745b299172c3afa08fe2fc016acfe1ccc,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r8dnr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-7d48f48f5f-4rgh7_openstack-operators(fc8f06be-5292-423d-bba6-e50068054197): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:34 crc kubenswrapper[4763]: E1206 08:26:34.333588 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 06 08:26:34 crc kubenswrapper[4763]: E1206 08:26:34.333988 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7vz87,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-78qkv_openstack-operators(ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:34 crc kubenswrapper[4763]: I1206 08:26:34.806008 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp"] Dec 06 08:26:38 crc kubenswrapper[4763]: W1206 08:26:38.193158 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9e0900d_f8fd_4cf1_a38a_5b979ec35d5c.slice/crio-2f2cda0c2d8df072cc1ae4eec51b69cf230648d01470d845af409f1523efa1e6 WatchSource:0}: Error finding container 2f2cda0c2d8df072cc1ae4eec51b69cf230648d01470d845af409f1523efa1e6: Status 404 returned error can't find the container with id 2f2cda0c2d8df072cc1ae4eec51b69cf230648d01470d845af409f1523efa1e6 Dec 06 08:26:38 crc kubenswrapper[4763]: I1206 08:26:38.459797 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v"] Dec 06 08:26:38 crc kubenswrapper[4763]: I1206 08:26:38.721351 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mzzdf"] Dec 06 08:26:38 crc kubenswrapper[4763]: W1206 08:26:38.795152 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e707b64_79d0_4401_9401_a80ed24a9658.slice/crio-865f21722f1b349f88e92800817a97938d25638c7bf1fdfc5d94137db1b5c929 WatchSource:0}: Error finding container 865f21722f1b349f88e92800817a97938d25638c7bf1fdfc5d94137db1b5c929: Status 404 returned error can't find the container with id 865f21722f1b349f88e92800817a97938d25638c7bf1fdfc5d94137db1b5c929 Dec 06 08:26:38 crc kubenswrapper[4763]: W1206 08:26:38.808773 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode50325ec_2cee_48c6_966a_26a08a87806e.slice/crio-eb144e0b95a1c8b5cf4db05019c6d731cf7c51a85e32e8b07bff7b3273640831 WatchSource:0}: Error finding container eb144e0b95a1c8b5cf4db05019c6d731cf7c51a85e32e8b07bff7b3273640831: Status 404 returned error can't find the container with id eb144e0b95a1c8b5cf4db05019c6d731cf7c51a85e32e8b07bff7b3273640831 Dec 06 08:26:39 crc kubenswrapper[4763]: I1206 08:26:39.142348 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" 
event={"ID":"ff46659d-6be0-4f7b-81a8-f8de0b6331ae","Type":"ContainerStarted","Data":"d8b4b91492f6d9b88aac1600a596cdee0f41d0d60cc08f7d787e0975c6929246"} Dec 06 08:26:39 crc kubenswrapper[4763]: I1206 08:26:39.153769 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" event={"ID":"1cff2610-ab42-4f8d-8e4a-22218c0f30e0","Type":"ContainerStarted","Data":"4548e399e68089f32f794104056745b3b1e3944f474626391f35f70077e6fd42"} Dec 06 08:26:39 crc kubenswrapper[4763]: I1206 08:26:39.156331 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" event={"ID":"bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5","Type":"ContainerStarted","Data":"d2c3f7649e3b0ec5e38aa9f3748cfe6a6c92613dc40b0d96d09925bc7a2a3a1d"} Dec 06 08:26:39 crc kubenswrapper[4763]: I1206 08:26:39.157798 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" event={"ID":"7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b","Type":"ContainerStarted","Data":"a3f8bd443cdbff142b6e994806b39028bbd38ee3f92891ac797d15f62b893ccb"} Dec 06 08:26:39 crc kubenswrapper[4763]: I1206 08:26:39.158760 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" event={"ID":"3e707b64-79d0-4401-9401-a80ed24a9658","Type":"ContainerStarted","Data":"865f21722f1b349f88e92800817a97938d25638c7bf1fdfc5d94137db1b5c929"} Dec 06 08:26:39 crc kubenswrapper[4763]: I1206 08:26:39.160058 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mzzdf" event={"ID":"e50325ec-2cee-48c6-966a-26a08a87806e","Type":"ContainerStarted","Data":"eb144e0b95a1c8b5cf4db05019c6d731cf7c51a85e32e8b07bff7b3273640831"} Dec 06 08:26:39 crc kubenswrapper[4763]: I1206 08:26:39.161063 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" event={"ID":"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c","Type":"ContainerStarted","Data":"2f2cda0c2d8df072cc1ae4eec51b69cf230648d01470d845af409f1523efa1e6"} Dec 06 08:26:39 crc kubenswrapper[4763]: I1206 08:26:39.162188 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" event={"ID":"88e79272-2e99-462a-b29c-b4d2a34ed95b","Type":"ContainerStarted","Data":"f5478a51b7c269530f5391ed277ddb07e828970ab9c4493cea8a8b2529d755c9"} Dec 06 08:26:39 crc kubenswrapper[4763]: I1206 08:26:39.163629 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" event={"ID":"a6e1401e-85a2-4477-96d2-58acbc583139","Type":"ContainerStarted","Data":"61214529cf6ad8d1361887ba68a8ae50c9c272ce5c6acadd49ea389c643bdb41"} Dec 06 08:26:40 crc kubenswrapper[4763]: I1206 08:26:40.193590 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" event={"ID":"5674dbca-4697-4993-888b-680428fba7ba","Type":"ContainerStarted","Data":"fac11e5e05ebe754595f64f5d148df0a7155b8a9bc6704aba32d9d3706b6df22"} Dec 06 08:26:40 crc kubenswrapper[4763]: I1206 08:26:40.208016 4763 generic.go:334] "Generic (PLEG): container finished" podID="e50325ec-2cee-48c6-966a-26a08a87806e" containerID="26a57aa5676e34c7cfa733fcc2b0db1e628a5d40d20e4e9bd938777fdfb9c2c1" exitCode=0 Dec 06 08:26:40 crc 
kubenswrapper[4763]: I1206 08:26:40.208260 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mzzdf" event={"ID":"e50325ec-2cee-48c6-966a-26a08a87806e","Type":"ContainerDied","Data":"26a57aa5676e34c7cfa733fcc2b0db1e628a5d40d20e4e9bd938777fdfb9c2c1"} Dec 06 08:26:40 crc kubenswrapper[4763]: I1206 08:26:40.212106 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" event={"ID":"e91554af-5d2b-4477-be5e-314a9b6e901d","Type":"ContainerStarted","Data":"17a5ee97627376ab277f4af2cb27067d44e0eaa22204e198feaf1f47a7f92560"} Dec 06 08:26:40 crc kubenswrapper[4763]: I1206 08:26:40.225541 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" event={"ID":"3d7a6c13-0b20-44ac-afb5-6d67630877eb","Type":"ContainerStarted","Data":"bab4aef2d61ea63c9b768006cc61ac6c578037c256109a38b3550968a8794248"} Dec 06 08:26:41 crc kubenswrapper[4763]: I1206 08:26:41.239329 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" event={"ID":"94ab8de3-6887-460c-a3c0-d0cf4dcf4ead","Type":"ContainerStarted","Data":"a22dc470d0b0b44dc5322ff412e83f278d9c9dbe5ab21c77b4f54bd6154eedbf"} Dec 06 08:26:50 crc kubenswrapper[4763]: I1206 08:26:42.038910 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:50 crc kubenswrapper[4763]: I1206 08:26:42.039215 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:50 crc kubenswrapper[4763]: I1206 08:26:42.044858 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-webhook-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:50 crc kubenswrapper[4763]: I1206 08:26:42.045406 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c81486b2-5d29-4032-9db8-8f8266846f74-metrics-certs\") pod \"openstack-operator-controller-manager-5b68f46455-24xn4\" (UID: \"c81486b2-5d29-4032-9db8-8f8266846f74\") " pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:50 crc kubenswrapper[4763]: I1206 08:26:42.188255 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.253381 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.254012 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nhq47,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-dbttk_openstack-operators(3d914ebd-1d7f-405f-aa0c-c8b254ec7196): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.256086 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" podUID="3d914ebd-1d7f-405f-aa0c-c8b254ec7196" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.306251 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.306779 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m 
DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tjcd7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-nrmpg_openstack-operators(2e2c64e0-cee9-47bd-afca-2fadeeb61b01): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.308067 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" podUID="2e2c64e0-cee9-47bd-afca-2fadeeb61b01" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.312110 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.312201 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5z85s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
telemetry-operator-controller-manager-76cc84c6bb-vzhvh_openstack-operators(e4139d53-17de-4e12-a43a-3f571154e203): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.313883 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" podUID="e4139d53-17de-4e12-a43a-3f571154e203" Dec 06 08:26:54 crc kubenswrapper[4763]: I1206 08:26:54.332733 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" event={"ID":"1aa32609-8006-42dd-94d3-0340547ed370","Type":"ContainerStarted","Data":"21fc192b8aeb56f2fdd0d60e96d625390662a420acaa86dbe963f8ed4d1f9593"} Dec 06 08:26:54 crc kubenswrapper[4763]: I1206 08:26:54.334215 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64" event={"ID":"b9daf37b-2ddd-4324-98d5-ab782c45de9a","Type":"ContainerStarted","Data":"f5a8731b0c359989938f0d868e52d437426dd9093919f5022dc92b2cd043d062"} Dec 06 08:26:54 crc kubenswrapper[4763]: I1206 08:26:54.365264 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dsc64" podStartSLOduration=18.219435328 podStartE2EDuration="45.365231568s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.699354871 +0000 UTC m=+854.275059909" lastFinishedPulling="2025-12-06 08:26:38.845151111 +0000 UTC m=+881.420856149" observedRunningTime="2025-12-06 08:26:54.362949427 +0000 UTC m=+896.938654485" watchObservedRunningTime="2025-12-06 08:26:54.365231568 +0000 UTC m=+896.940936606" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.413956 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.414124 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qqrhd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-dcrcf_openstack-operators(c245a4f1-0cf3-4627-ad45-ce24db12fc93): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.415529 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" podUID="c245a4f1-0cf3-4627-ad45-ce24db12fc93" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.505217 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.505378 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r8dnr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-7d48f48f5f-4rgh7_openstack-operators(fc8f06be-5292-423d-bba6-e50068054197): ErrImagePull: 
rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.506539 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" podUID="fc8f06be-5292-423d-bba6-e50068054197" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.511500 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.511659 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-np798,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-h9ss4_openstack-operators(cd89c4f2-cf50-4183-b364-d4886b5369a6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.512821 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" podUID="cd89c4f2-cf50-4183-b364-d4886b5369a6" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.610042 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.610204 4763 kuberuntime_manager.go:1274] "Unhandled Error" 
err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7vz87,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-78qkv_openstack-operators(ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.611527 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" podUID="ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.716983 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.717137 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5jsll,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-fkptp_openstack-operators(43a9b006-b703-46ad-a74b-f00752e25fdc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.718312 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" podUID="43a9b006-b703-46ad-a74b-f00752e25fdc" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.937762 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:ccc60d56d8efc2e91a7d8a7131eb7e06c189c32247f2a819818c084ba2e2f2ab" Dec 06 08:26:54 crc kubenswrapper[4763]: E1206 08:26:54.937986 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:ccc60d56d8efc2e91a7d8a7131eb7e06c189c32247f2a819818c084ba2e2f2ab,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cv5qk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-78d48bff9d-kj44v_openstack-operators(3e707b64-79d0-4401-9401-a80ed24a9658): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.572298 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.574160 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fz2kc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-rb7dw_openstack-operators(a6e1401e-85a2-4477-96d2-58acbc583139): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.574189 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" 
image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.574381 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xls9p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-bvdzz_openstack-operators(7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.574691 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.574772 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h9zdf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-jjs5h_openstack-operators(88e79272-2e99-462a-b29c-b4d2a34ed95b): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.575974 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" podUID="a6e1401e-85a2-4477-96d2-58acbc583139" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.576005 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.575999 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" podUID="88e79272-2e99-462a-b29c-b4d2a34ed95b" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.576073 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" podUID="7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.576146 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j8p2r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-s7cj4_openstack-operators(ff46659d-6be0-4f7b-81a8-f8de0b6331ae): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.576448 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.576555 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wxlk5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-ngktl_openstack-operators(5674dbca-4697-4993-888b-680428fba7ba): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.577412 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" podUID="ff46659d-6be0-4f7b-81a8-f8de0b6331ae" Dec 06 08:26:55 crc 
kubenswrapper[4763]: E1206 08:26:55.577428 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.577536 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hcsmj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-t2vdt_openstack-operators(bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.577669 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" podUID="5674dbca-4697-4993-888b-680428fba7ba" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.577959 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.578311 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_API_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_PROC_I
MAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-processor:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELAT
ED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-an
telope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMA
GE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l57sw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp_openstack-operators(f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.579066 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" podUID="bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.579619 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.579696 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7pm8c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-j4fds_openstack-operators(3d7a6c13-0b20-44ac-afb5-6d67630877eb): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.579729 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.579778 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7dd4d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-svncl_openstack-operators(1cff2610-ab42-4f8d-8e4a-22218c0f30e0): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 06 08:26:55 crc kubenswrapper[4763]: E1206 08:26:55.580755 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" podUID="3d7a6c13-0b20-44ac-afb5-6d67630877eb" Dec 06 08:26:55 crc kubenswrapper[4763]: 
E1206 08:26:55.580945 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" podUID="1cff2610-ab42-4f8d-8e4a-22218c0f30e0" Dec 06 08:26:55 crc kubenswrapper[4763]: I1206 08:26:55.994032 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4"] Dec 06 08:26:56 crc kubenswrapper[4763]: W1206 08:26:56.006277 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc81486b2_5d29_4032_9db8_8f8266846f74.slice/crio-a5d5c33937ced690541cb9a6ba6d7bca29a2a7767bdd3bb30699f1b4d02f4865 WatchSource:0}: Error finding container a5d5c33937ced690541cb9a6ba6d7bca29a2a7767bdd3bb30699f1b4d02f4865: Status 404 returned error can't find the container with id a5d5c33937ced690541cb9a6ba6d7bca29a2a7767bdd3bb30699f1b4d02f4865 Dec 06 08:26:56 crc kubenswrapper[4763]: I1206 08:26:56.369789 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" event={"ID":"c245a4f1-0cf3-4627-ad45-ce24db12fc93","Type":"ContainerStarted","Data":"abaeed39b699e40d70219c7b52643265f7d119af0f1d61115ab8907f0699dbb8"} Dec 06 08:26:56 crc kubenswrapper[4763]: I1206 08:26:56.371387 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" event={"ID":"fc8f06be-5292-423d-bba6-e50068054197","Type":"ContainerStarted","Data":"49b54c873c1784b597b40377d59e1053c89820f502b3b07cc1613593598ef636"} Dec 06 08:26:56 crc kubenswrapper[4763]: I1206 08:26:56.372466 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" event={"ID":"c81486b2-5d29-4032-9db8-8f8266846f74","Type":"ContainerStarted","Data":"a5d5c33937ced690541cb9a6ba6d7bca29a2a7767bdd3bb30699f1b4d02f4865"} Dec 06 08:26:56 crc kubenswrapper[4763]: I1206 08:26:56.374923 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" Dec 06 08:26:56 crc kubenswrapper[4763]: I1206 08:26:56.374953 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" Dec 06 08:26:56 crc kubenswrapper[4763]: I1206 08:26:56.376444 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" Dec 06 08:26:56 crc kubenswrapper[4763]: I1206 08:26:56.377348 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.427227 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" event={"ID":"ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83","Type":"ContainerStarted","Data":"5699a37315be270a433745e8791ae96dc42e59babb51a9d28142bf8f74b2effa"} Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.440974 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" 
event={"ID":"e4139d53-17de-4e12-a43a-3f571154e203","Type":"ContainerStarted","Data":"71f231428b50a7bce3be167796cc1b26921a9e1a70d7d4774d81aee5171ec051"} Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.442517 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" event={"ID":"c81486b2-5d29-4032-9db8-8f8266846f74","Type":"ContainerStarted","Data":"9a26ed66eca899415f49774f3dd5ee3f664de7b2dd380688053fb72753bbd198"} Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.445389 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" event={"ID":"3d914ebd-1d7f-405f-aa0c-c8b254ec7196","Type":"ContainerStarted","Data":"5c60ba0d6ff8bcd6862818084eac704935a14e37d586842256f346a73142ed84"} Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.447684 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" event={"ID":"e91554af-5d2b-4477-be5e-314a9b6e901d","Type":"ContainerStarted","Data":"01a880dd36f7ca1b40177f13870dc13b9b2977567ce09c2b1abdedf3e8607e90"} Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.448610 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.450155 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" event={"ID":"1aa32609-8006-42dd-94d3-0340547ed370","Type":"ContainerStarted","Data":"895222621c6af4749889070ae71c11a06fcb4d26702542484adca370beb33179"} Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.450667 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.456339 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.461583 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" event={"ID":"43a9b006-b703-46ad-a74b-f00752e25fdc","Type":"ContainerStarted","Data":"ad236122b3e24cda8b73f185e0ccd45089b905712ce640f8434445b5ee41aab5"} Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.469005 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" event={"ID":"94ab8de3-6887-460c-a3c0-d0cf4dcf4ead","Type":"ContainerStarted","Data":"8ec247cd40a3712d33636c90352727c2c407d97e48d0ee55b8b09de5a19aa84c"} Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.469812 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.481977 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.483113 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" 
event={"ID":"cd89c4f2-cf50-4183-b364-d4886b5369a6","Type":"ContainerStarted","Data":"38e9cf94c511c89217f389c697dcb2fab4701294aa5f29c7f57cc614f093496c"} Dec 06 08:26:57 crc kubenswrapper[4763]: E1206 08:26:57.488324 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" podUID="f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c" Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.506703 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" podStartSLOduration=48.50668199 podStartE2EDuration="48.50668199s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:26:57.488811229 +0000 UTC m=+900.064516267" watchObservedRunningTime="2025-12-06 08:26:57.50668199 +0000 UTC m=+900.082387028" Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.520976 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" event={"ID":"2e2c64e0-cee9-47bd-afca-2fadeeb61b01","Type":"ContainerStarted","Data":"789435bf489e6cdf272309fa948ff8b814a321209fcf45f0bdcc7f65203e1485"} Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.524927 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" podStartSLOduration=4.3550768810000005 podStartE2EDuration="48.52489118s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.690943795 +0000 UTC m=+854.266648833" lastFinishedPulling="2025-12-06 08:26:55.860758094 +0000 UTC m=+898.436463132" observedRunningTime="2025-12-06 08:26:57.513344759 +0000 UTC m=+900.089049797" watchObservedRunningTime="2025-12-06 08:26:57.52489118 +0000 UTC m=+900.100596218" Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.528973 4763 generic.go:334] "Generic (PLEG): container finished" podID="e50325ec-2cee-48c6-966a-26a08a87806e" containerID="a7c32d6100748e009dd3cef5be1e1ffdf38c0e64ebb6939a7aaead8379dbd146" exitCode=0 Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.529242 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mzzdf" event={"ID":"e50325ec-2cee-48c6-966a-26a08a87806e","Type":"ContainerDied","Data":"a7c32d6100748e009dd3cef5be1e1ffdf38c0e64ebb6939a7aaead8379dbd146"} Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.567309 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zhxph" podStartSLOduration=4.256929527 podStartE2EDuration="48.567291161s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.733816739 +0000 UTC m=+854.309521777" lastFinishedPulling="2025-12-06 08:26:56.044178373 +0000 UTC m=+898.619883411" observedRunningTime="2025-12-06 08:26:57.547880139 +0000 UTC m=+900.123585177" watchObservedRunningTime="2025-12-06 08:26:57.567291161 +0000 UTC m=+900.142996189" Dec 06 08:26:57 crc kubenswrapper[4763]: I1206 08:26:57.680282 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/octavia-operator-controller-manager-998648c74-h47jh" podStartSLOduration=4.177313163 podStartE2EDuration="48.680257263s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.530812093 +0000 UTC m=+854.106517131" lastFinishedPulling="2025-12-06 08:26:56.033756183 +0000 UTC m=+898.609461231" observedRunningTime="2025-12-06 08:26:57.664144779 +0000 UTC m=+900.239849817" watchObservedRunningTime="2025-12-06 08:26:57.680257263 +0000 UTC m=+900.255962311" Dec 06 08:26:58 crc kubenswrapper[4763]: E1206 08:26:58.153450 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" podUID="3e707b64-79d0-4401-9401-a80ed24a9658" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.730227 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" event={"ID":"2e2c64e0-cee9-47bd-afca-2fadeeb61b01","Type":"ContainerStarted","Data":"d41211bd64755caaaa105bd102afb313e76e86b2d7d5740b80bb90ef93abff38"} Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.731444 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.755351 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" event={"ID":"3d7a6c13-0b20-44ac-afb5-6d67630877eb","Type":"ContainerStarted","Data":"1ea4943b19a64fa1ec727f5fa94af0f14dd78c88b8c9451ba98d621b5f032954"} Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.757620 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.760774 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" podStartSLOduration=4.785397245 podStartE2EDuration="49.760759694s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.029438424 +0000 UTC m=+853.605143462" lastFinishedPulling="2025-12-06 08:26:56.004800873 +0000 UTC m=+898.580505911" observedRunningTime="2025-12-06 08:26:58.758884644 +0000 UTC m=+901.334589672" watchObservedRunningTime="2025-12-06 08:26:58.760759694 +0000 UTC m=+901.336464732" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.765538 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.778889 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" event={"ID":"a6e1401e-85a2-4477-96d2-58acbc583139","Type":"ContainerStarted","Data":"379a2a6d47f8bdd295fe6b15f1ad8610f65b31328f2c3da96fc52f911de81d35"} Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.779787 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.796489 4763 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.806832 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-j4fds" podStartSLOduration=28.061451907 podStartE2EDuration="49.806815194s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.141413929 +0000 UTC m=+853.717118967" lastFinishedPulling="2025-12-06 08:26:32.886777216 +0000 UTC m=+875.462482254" observedRunningTime="2025-12-06 08:26:58.805403356 +0000 UTC m=+901.381108394" watchObservedRunningTime="2025-12-06 08:26:58.806815194 +0000 UTC m=+901.382520232" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.813051 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" event={"ID":"1cff2610-ab42-4f8d-8e4a-22218c0f30e0","Type":"ContainerStarted","Data":"f3f4dbb2fb22f05e75506bfa324296fcff3eeff60115d1cf1d65128cff19a59c"} Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.842093 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" event={"ID":"e4139d53-17de-4e12-a43a-3f571154e203","Type":"ContainerStarted","Data":"7dd1a926b08cd9b229ed30f7cd99c74bedc7650d77a31f7781ebbb6aa516f8bd"} Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.842779 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.886599 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" event={"ID":"bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5","Type":"ContainerStarted","Data":"e4675d2c682033440f2f0c1f111513188ce7ffa781f7bd46efddd81f7c0b9b8f"} Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.888269 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.907123 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-rb7dw" podStartSLOduration=28.348243979 podStartE2EDuration="49.907106025s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.252181331 +0000 UTC m=+853.827886379" lastFinishedPulling="2025-12-06 08:26:32.811043397 +0000 UTC m=+875.386748425" observedRunningTime="2025-12-06 08:26:58.905388929 +0000 UTC m=+901.481093967" watchObservedRunningTime="2025-12-06 08:26:58.907106025 +0000 UTC m=+901.482811063" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.927850 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" event={"ID":"cd89c4f2-cf50-4183-b364-d4886b5369a6","Type":"ContainerStarted","Data":"95dc2693560c490bbe29adebf15761c5f7815c85ce39b677b4312e3cd33b93d0"} Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.928577 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.935670 4763 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.962458 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" podStartSLOduration=5.562463698 podStartE2EDuration="49.962439444s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.327291804 +0000 UTC m=+853.902996842" lastFinishedPulling="2025-12-06 08:26:55.72726755 +0000 UTC m=+898.302972588" observedRunningTime="2025-12-06 08:26:58.959778904 +0000 UTC m=+901.535483942" watchObservedRunningTime="2025-12-06 08:26:58.962439444 +0000 UTC m=+901.538144482" Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.973293 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" event={"ID":"88e79272-2e99-462a-b29c-b4d2a34ed95b","Type":"ContainerStarted","Data":"bc0dd32ab9a63d29387da87b037d0c8da49d4928b4f2721a6029f8658884fa7f"} Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.993026 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" event={"ID":"ff46659d-6be0-4f7b-81a8-f8de0b6331ae","Type":"ContainerStarted","Data":"30839405ba44479a077f5995ef327be45c821fcbf5c42b434ad04d36bd82e708"} Dec 06 08:26:58 crc kubenswrapper[4763]: I1206 08:26:58.993974 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.035039 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" event={"ID":"ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83","Type":"ContainerStarted","Data":"39f07df18a2de212fea5585ce9750bda019f8503aeacff33d8bea79682e0680d"} Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.035814 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.038877 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-svncl" podStartSLOduration=28.591329663 podStartE2EDuration="50.038867572s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.363490838 +0000 UTC m=+853.939195876" lastFinishedPulling="2025-12-06 08:26:32.811028747 +0000 UTC m=+875.386733785" observedRunningTime="2025-12-06 08:26:59.03618796 +0000 UTC m=+901.611892998" watchObservedRunningTime="2025-12-06 08:26:59.038867572 +0000 UTC m=+901.614572610" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.048271 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.055153 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" event={"ID":"fc8f06be-5292-423d-bba6-e50068054197","Type":"ContainerStarted","Data":"73955e5396304bee30aacf24b21b342e15819be17f8833ac1f92eb0936c92dc0"} Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.055770 4763 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.069801 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-t2vdt" podStartSLOduration=29.043963237 podStartE2EDuration="51.069782735s" podCreationTimestamp="2025-12-06 08:26:08 +0000 UTC" firstStartedPulling="2025-12-06 08:26:10.287015945 +0000 UTC m=+852.862720983" lastFinishedPulling="2025-12-06 08:26:32.312835433 +0000 UTC m=+874.888540481" observedRunningTime="2025-12-06 08:26:59.062311364 +0000 UTC m=+901.638016402" watchObservedRunningTime="2025-12-06 08:26:59.069782735 +0000 UTC m=+901.645487773" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.075294 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" event={"ID":"43a9b006-b703-46ad-a74b-f00752e25fdc","Type":"ContainerStarted","Data":"a07099d146fd9823d1293d776170f919207b136253d36e29b80aadee2d37e328"} Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.075963 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.084730 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" event={"ID":"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c","Type":"ContainerStarted","Data":"74c09e24fb45ce6d5b12c6334e15715c81f2067a6a081419ccf9f7c5116f6722"} Dec 06 08:26:59 crc kubenswrapper[4763]: E1206 08:26:59.101221 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" podUID="f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.127121 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" event={"ID":"3d914ebd-1d7f-405f-aa0c-c8b254ec7196","Type":"ContainerStarted","Data":"5ba2e4564b5942a7cc8f8568621f22146ef936a439c22d8cd9b949d70dd5dbf8"} Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.127788 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.155061 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" event={"ID":"3e707b64-79d0-4401-9401-a80ed24a9658","Type":"ContainerStarted","Data":"3d25351ad2328ba84648744e481455a1eb39be0d7608ceedd5edd55bc26830b5"} Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.156092 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" podStartSLOduration=5.502512144 podStartE2EDuration="50.156074708s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.424693356 +0000 UTC m=+854.000398394" lastFinishedPulling="2025-12-06 
08:26:56.07825592 +0000 UTC m=+898.653960958" observedRunningTime="2025-12-06 08:26:59.155615325 +0000 UTC m=+901.731320363" watchObservedRunningTime="2025-12-06 08:26:59.156074708 +0000 UTC m=+901.731779746" Dec 06 08:26:59 crc kubenswrapper[4763]: E1206 08:26:59.170161 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:ccc60d56d8efc2e91a7d8a7131eb7e06c189c32247f2a819818c084ba2e2f2ab\\\"\"" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" podUID="3e707b64-79d0-4401-9401-a80ed24a9658" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.191207 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" event={"ID":"5674dbca-4697-4993-888b-680428fba7ba","Type":"ContainerStarted","Data":"55e502e04f4c37dd53b719a9b98fbf840a51ff8819c86f4e344be66f3115aba5"} Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.192262 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.203952 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.204333 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" podStartSLOduration=6.031897898 podStartE2EDuration="50.204310597s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.688648383 +0000 UTC m=+854.264353421" lastFinishedPulling="2025-12-06 08:26:55.861061082 +0000 UTC m=+898.436766120" observedRunningTime="2025-12-06 08:26:59.192458028 +0000 UTC m=+901.768163066" watchObservedRunningTime="2025-12-06 08:26:59.204310597 +0000 UTC m=+901.780015635" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.216347 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" event={"ID":"7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b","Type":"ContainerStarted","Data":"4125ce8426fa2e0db2e8aa3aaa8401bbc1f0f586fc7a62fba04875be88c85e25"} Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.217237 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.221798 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" podStartSLOduration=5.941981932 podStartE2EDuration="51.221786418s" podCreationTimestamp="2025-12-06 08:26:08 +0000 UTC" firstStartedPulling="2025-12-06 08:26:10.755299243 +0000 UTC m=+853.331004281" lastFinishedPulling="2025-12-06 08:26:56.035103719 +0000 UTC m=+898.610808767" observedRunningTime="2025-12-06 08:26:59.221175151 +0000 UTC m=+901.796880189" watchObservedRunningTime="2025-12-06 08:26:59.221786418 +0000 UTC m=+901.797491456" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.233125 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" Dec 06 
08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.244025 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" event={"ID":"c245a4f1-0cf3-4627-ad45-ce24db12fc93","Type":"ContainerStarted","Data":"55d735232aaa74d0e9c20142a823f5210b5c6543069012e863857e5a44cccfab"} Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.244075 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.244466 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.255241 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8jwdv" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.257273 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jjs5h" podStartSLOduration=28.724690124 podStartE2EDuration="50.257255342s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:10.780272135 +0000 UTC m=+853.355977173" lastFinishedPulling="2025-12-06 08:26:32.312837353 +0000 UTC m=+874.888542391" observedRunningTime="2025-12-06 08:26:59.256234995 +0000 UTC m=+901.831940033" watchObservedRunningTime="2025-12-06 08:26:59.257255342 +0000 UTC m=+901.832960390" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.308028 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-s7cj4" podStartSLOduration=29.24057955 podStartE2EDuration="51.308007399s" podCreationTimestamp="2025-12-06 08:26:08 +0000 UTC" firstStartedPulling="2025-12-06 08:26:10.245513567 +0000 UTC m=+852.821218605" lastFinishedPulling="2025-12-06 08:26:32.312941416 +0000 UTC m=+874.888646454" observedRunningTime="2025-12-06 08:26:59.294212057 +0000 UTC m=+901.869917095" watchObservedRunningTime="2025-12-06 08:26:59.308007399 +0000 UTC m=+901.883712437" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.366458 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bvdzz" podStartSLOduration=29.225253292 podStartE2EDuration="50.366436992s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.171669654 +0000 UTC m=+853.747374692" lastFinishedPulling="2025-12-06 08:26:32.312853354 +0000 UTC m=+874.888558392" observedRunningTime="2025-12-06 08:26:59.364282684 +0000 UTC m=+901.939987722" watchObservedRunningTime="2025-12-06 08:26:59.366436992 +0000 UTC m=+901.942142030" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.475222 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" podStartSLOduration=5.937836504 podStartE2EDuration="50.475207311s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.487797064 +0000 UTC m=+854.063502102" lastFinishedPulling="2025-12-06 08:26:56.025167841 +0000 UTC m=+898.600872909" observedRunningTime="2025-12-06 08:26:59.471925852 +0000 UTC m=+902.047630890" watchObservedRunningTime="2025-12-06 
08:26:59.475207311 +0000 UTC m=+902.050912349" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.499342 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-ngktl" podStartSLOduration=29.946436825 podStartE2EDuration="51.49932177s" podCreationTimestamp="2025-12-06 08:26:08 +0000 UTC" firstStartedPulling="2025-12-06 08:26:10.759935858 +0000 UTC m=+853.335640896" lastFinishedPulling="2025-12-06 08:26:32.312820803 +0000 UTC m=+874.888525841" observedRunningTime="2025-12-06 08:26:59.496212197 +0000 UTC m=+902.071917235" watchObservedRunningTime="2025-12-06 08:26:59.49932177 +0000 UTC m=+902.075026808" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.586183 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" podStartSLOduration=5.89494034 podStartE2EDuration="50.586163868s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.169648759 +0000 UTC m=+853.745353797" lastFinishedPulling="2025-12-06 08:26:55.860872287 +0000 UTC m=+898.436577325" observedRunningTime="2025-12-06 08:26:59.579994481 +0000 UTC m=+902.155699519" watchObservedRunningTime="2025-12-06 08:26:59.586163868 +0000 UTC m=+902.161868906" Dec 06 08:26:59 crc kubenswrapper[4763]: I1206 08:26:59.690191 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" podStartSLOduration=6.157952011 podStartE2EDuration="50.690173538s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:11.503698933 +0000 UTC m=+854.079403971" lastFinishedPulling="2025-12-06 08:26:56.03592045 +0000 UTC m=+898.611625498" observedRunningTime="2025-12-06 08:26:59.629191577 +0000 UTC m=+902.204896615" watchObservedRunningTime="2025-12-06 08:26:59.690173538 +0000 UTC m=+902.265878566" Dec 06 08:27:00 crc kubenswrapper[4763]: I1206 08:27:00.250872 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mzzdf" event={"ID":"e50325ec-2cee-48c6-966a-26a08a87806e","Type":"ContainerStarted","Data":"336e5858a99d6d19979ff0180730c4a9605c1ab59096d82b95df8746807a5425"} Dec 06 08:27:00 crc kubenswrapper[4763]: E1206 08:27:00.253507 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" podUID="f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c" Dec 06 08:27:00 crc kubenswrapper[4763]: E1206 08:27:00.253908 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:ccc60d56d8efc2e91a7d8a7131eb7e06c189c32247f2a819818c084ba2e2f2ab\\\"\"" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" podUID="3e707b64-79d0-4401-9401-a80ed24a9658" Dec 06 08:27:00 crc kubenswrapper[4763]: I1206 08:27:00.300247 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mzzdf" podStartSLOduration=38.727103507 
podStartE2EDuration="43.300231874s" podCreationTimestamp="2025-12-06 08:26:17 +0000 UTC" firstStartedPulling="2025-12-06 08:26:53.662738854 +0000 UTC m=+896.238443892" lastFinishedPulling="2025-12-06 08:26:58.235867221 +0000 UTC m=+900.811572259" observedRunningTime="2025-12-06 08:27:00.29861189 +0000 UTC m=+902.874316928" watchObservedRunningTime="2025-12-06 08:27:00.300231874 +0000 UTC m=+902.875936912" Dec 06 08:27:01 crc kubenswrapper[4763]: I1206 08:27:01.260690 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fkptp" Dec 06 08:27:01 crc kubenswrapper[4763]: I1206 08:27:01.260745 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-vzhvh" Dec 06 08:27:01 crc kubenswrapper[4763]: I1206 08:27:01.260772 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nrmpg" Dec 06 08:27:01 crc kubenswrapper[4763]: I1206 08:27:01.260797 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-h9ss4" Dec 06 08:27:01 crc kubenswrapper[4763]: I1206 08:27:01.260822 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-dbttk" Dec 06 08:27:01 crc kubenswrapper[4763]: I1206 08:27:01.261922 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-7d48f48f5f-4rgh7" Dec 06 08:27:01 crc kubenswrapper[4763]: I1206 08:27:01.262319 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-78qkv" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.193685 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-5b68f46455-24xn4" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.437599 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9ffvq"] Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.439743 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.448074 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9ffvq"] Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.551998 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v9hx\" (UniqueName: \"kubernetes.io/projected/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-kube-api-access-8v9hx\") pod \"community-operators-9ffvq\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.552051 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-catalog-content\") pod \"community-operators-9ffvq\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.552107 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-utilities\") pod \"community-operators-9ffvq\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.653441 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-utilities\") pod \"community-operators-9ffvq\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.653537 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v9hx\" (UniqueName: \"kubernetes.io/projected/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-kube-api-access-8v9hx\") pod \"community-operators-9ffvq\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.653560 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-catalog-content\") pod \"community-operators-9ffvq\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.654023 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-utilities\") pod \"community-operators-9ffvq\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.654052 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-catalog-content\") pod \"community-operators-9ffvq\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.671342 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8v9hx\" (UniqueName: \"kubernetes.io/projected/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-kube-api-access-8v9hx\") pod \"community-operators-9ffvq\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:02 crc kubenswrapper[4763]: I1206 08:27:02.793782 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:03 crc kubenswrapper[4763]: I1206 08:27:03.280069 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9ffvq"] Dec 06 08:27:04 crc kubenswrapper[4763]: I1206 08:27:04.281820 4763 generic.go:334] "Generic (PLEG): container finished" podID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" containerID="36c3bbf6a39d1f1a7aec314e26a2b9339a16d5b4d679008deae79f9c2196c39f" exitCode=0 Dec 06 08:27:04 crc kubenswrapper[4763]: I1206 08:27:04.281886 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ffvq" event={"ID":"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226","Type":"ContainerDied","Data":"36c3bbf6a39d1f1a7aec314e26a2b9339a16d5b4d679008deae79f9c2196c39f"} Dec 06 08:27:04 crc kubenswrapper[4763]: I1206 08:27:04.282119 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ffvq" event={"ID":"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226","Type":"ContainerStarted","Data":"fb80319c6ea12375e0c7046e87669e40cc4f7e11d0d9f3ad1d0b16fc0e9709ed"} Dec 06 08:27:05 crc kubenswrapper[4763]: I1206 08:27:05.298142 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ffvq" event={"ID":"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226","Type":"ContainerStarted","Data":"889f741516d72ab46f4a4c690ed29a4bd2d56cf62b728226a50ae962bd2eef2d"} Dec 06 08:27:06 crc kubenswrapper[4763]: I1206 08:27:06.309680 4763 generic.go:334] "Generic (PLEG): container finished" podID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" containerID="889f741516d72ab46f4a4c690ed29a4bd2d56cf62b728226a50ae962bd2eef2d" exitCode=0 Dec 06 08:27:06 crc kubenswrapper[4763]: I1206 08:27:06.309725 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ffvq" event={"ID":"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226","Type":"ContainerDied","Data":"889f741516d72ab46f4a4c690ed29a4bd2d56cf62b728226a50ae962bd2eef2d"} Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.318435 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ffvq" event={"ID":"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226","Type":"ContainerStarted","Data":"dfd89e0d74e0af8f41b3e6ae7698d0e5e54c3d33909a2b2a271ba4b394c82f19"} Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.341231 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9ffvq" podStartSLOduration=2.908044896 podStartE2EDuration="5.341210316s" podCreationTimestamp="2025-12-06 08:27:02 +0000 UTC" firstStartedPulling="2025-12-06 08:27:04.283196472 +0000 UTC m=+906.858901510" lastFinishedPulling="2025-12-06 08:27:06.716361892 +0000 UTC m=+909.292066930" observedRunningTime="2025-12-06 08:27:07.334204477 +0000 UTC m=+909.909909515" watchObservedRunningTime="2025-12-06 08:27:07.341210316 +0000 UTC m=+909.916915364" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.425303 4763 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-tptcx"] Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.427007 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.437827 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tptcx"] Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.532386 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-catalog-content\") pod \"certified-operators-tptcx\" (UID: \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.532555 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hdgf\" (UniqueName: \"kubernetes.io/projected/80225f03-b4d5-470c-9bd0-bf1ec8e74583-kube-api-access-8hdgf\") pod \"certified-operators-tptcx\" (UID: \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.532619 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-utilities\") pod \"certified-operators-tptcx\" (UID: \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.603933 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.603971 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.633429 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-catalog-content\") pod \"certified-operators-tptcx\" (UID: \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.633486 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hdgf\" (UniqueName: \"kubernetes.io/projected/80225f03-b4d5-470c-9bd0-bf1ec8e74583-kube-api-access-8hdgf\") pod \"certified-operators-tptcx\" (UID: \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.633533 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-utilities\") pod \"certified-operators-tptcx\" (UID: \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.633997 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-utilities\") pod \"certified-operators-tptcx\" (UID: 
\"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.634269 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-catalog-content\") pod \"certified-operators-tptcx\" (UID: \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.657468 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hdgf\" (UniqueName: \"kubernetes.io/projected/80225f03-b4d5-470c-9bd0-bf1ec8e74583-kube-api-access-8hdgf\") pod \"certified-operators-tptcx\" (UID: \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.658126 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:27:07 crc kubenswrapper[4763]: I1206 08:27:07.744145 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:08 crc kubenswrapper[4763]: I1206 08:27:08.115933 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tptcx"] Dec 06 08:27:08 crc kubenswrapper[4763]: W1206 08:27:08.119419 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80225f03_b4d5_470c_9bd0_bf1ec8e74583.slice/crio-3534a05a2534a760db9f9609e89cf2095b4e295eb16dadcd42baa3b910a61dc3 WatchSource:0}: Error finding container 3534a05a2534a760db9f9609e89cf2095b4e295eb16dadcd42baa3b910a61dc3: Status 404 returned error can't find the container with id 3534a05a2534a760db9f9609e89cf2095b4e295eb16dadcd42baa3b910a61dc3 Dec 06 08:27:08 crc kubenswrapper[4763]: I1206 08:27:08.326034 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tptcx" event={"ID":"80225f03-b4d5-470c-9bd0-bf1ec8e74583","Type":"ContainerStarted","Data":"3534a05a2534a760db9f9609e89cf2095b4e295eb16dadcd42baa3b910a61dc3"} Dec 06 08:27:08 crc kubenswrapper[4763]: I1206 08:27:08.382417 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:27:09 crc kubenswrapper[4763]: I1206 08:27:09.334911 4763 generic.go:334] "Generic (PLEG): container finished" podID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" containerID="596733269ed396ab105025f56ed4ee5e401234386cd9570e39ef1256dff4f765" exitCode=0 Dec 06 08:27:09 crc kubenswrapper[4763]: I1206 08:27:09.334985 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tptcx" event={"ID":"80225f03-b4d5-470c-9bd0-bf1ec8e74583","Type":"ContainerDied","Data":"596733269ed396ab105025f56ed4ee5e401234386cd9570e39ef1256dff4f765"} Dec 06 08:27:10 crc kubenswrapper[4763]: I1206 08:27:10.375561 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dcrcf" Dec 06 08:27:10 crc kubenswrapper[4763]: I1206 08:27:10.416565 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mzzdf"] Dec 06 08:27:10 crc kubenswrapper[4763]: I1206 08:27:10.416988 4763 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mzzdf" podUID="e50325ec-2cee-48c6-966a-26a08a87806e" containerName="registry-server" containerID="cri-o://336e5858a99d6d19979ff0180730c4a9605c1ab59096d82b95df8746807a5425" gracePeriod=2 Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.384318 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tptcx" event={"ID":"80225f03-b4d5-470c-9bd0-bf1ec8e74583","Type":"ContainerStarted","Data":"40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478"} Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.386969 4763 generic.go:334] "Generic (PLEG): container finished" podID="e50325ec-2cee-48c6-966a-26a08a87806e" containerID="336e5858a99d6d19979ff0180730c4a9605c1ab59096d82b95df8746807a5425" exitCode=0 Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.386992 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mzzdf" event={"ID":"e50325ec-2cee-48c6-966a-26a08a87806e","Type":"ContainerDied","Data":"336e5858a99d6d19979ff0180730c4a9605c1ab59096d82b95df8746807a5425"} Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.610333 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.794482 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-225jh\" (UniqueName: \"kubernetes.io/projected/e50325ec-2cee-48c6-966a-26a08a87806e-kube-api-access-225jh\") pod \"e50325ec-2cee-48c6-966a-26a08a87806e\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.794985 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-utilities\") pod \"e50325ec-2cee-48c6-966a-26a08a87806e\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.795067 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-catalog-content\") pod \"e50325ec-2cee-48c6-966a-26a08a87806e\" (UID: \"e50325ec-2cee-48c6-966a-26a08a87806e\") " Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.796350 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-utilities" (OuterVolumeSpecName: "utilities") pod "e50325ec-2cee-48c6-966a-26a08a87806e" (UID: "e50325ec-2cee-48c6-966a-26a08a87806e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.802369 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e50325ec-2cee-48c6-966a-26a08a87806e-kube-api-access-225jh" (OuterVolumeSpecName: "kube-api-access-225jh") pod "e50325ec-2cee-48c6-966a-26a08a87806e" (UID: "e50325ec-2cee-48c6-966a-26a08a87806e"). InnerVolumeSpecName "kube-api-access-225jh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.816707 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e50325ec-2cee-48c6-966a-26a08a87806e" (UID: "e50325ec-2cee-48c6-966a-26a08a87806e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.897765 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-225jh\" (UniqueName: \"kubernetes.io/projected/e50325ec-2cee-48c6-966a-26a08a87806e-kube-api-access-225jh\") on node \"crc\" DevicePath \"\"" Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.897816 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:27:11 crc kubenswrapper[4763]: I1206 08:27:11.897828 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e50325ec-2cee-48c6-966a-26a08a87806e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.397403 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mzzdf" event={"ID":"e50325ec-2cee-48c6-966a-26a08a87806e","Type":"ContainerDied","Data":"eb144e0b95a1c8b5cf4db05019c6d731cf7c51a85e32e8b07bff7b3273640831"} Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.397533 4763 scope.go:117] "RemoveContainer" containerID="336e5858a99d6d19979ff0180730c4a9605c1ab59096d82b95df8746807a5425" Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.397606 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mzzdf" Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.399874 4763 generic.go:334] "Generic (PLEG): container finished" podID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" containerID="40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478" exitCode=0 Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.399941 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tptcx" event={"ID":"80225f03-b4d5-470c-9bd0-bf1ec8e74583","Type":"ContainerDied","Data":"40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478"} Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.422732 4763 scope.go:117] "RemoveContainer" containerID="a7c32d6100748e009dd3cef5be1e1ffdf38c0e64ebb6939a7aaead8379dbd146" Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.453434 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mzzdf"] Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.459467 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mzzdf"] Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.460333 4763 scope.go:117] "RemoveContainer" containerID="26a57aa5676e34c7cfa733fcc2b0db1e628a5d40d20e4e9bd938777fdfb9c2c1" Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.536944 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.536995 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.795092 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.795378 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:12 crc kubenswrapper[4763]: I1206 08:27:12.856528 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:13 crc kubenswrapper[4763]: I1206 08:27:13.458075 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:13 crc kubenswrapper[4763]: I1206 08:27:13.732476 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e50325ec-2cee-48c6-966a-26a08a87806e" path="/var/lib/kubelet/pods/e50325ec-2cee-48c6-966a-26a08a87806e/volumes" Dec 06 08:27:14 crc kubenswrapper[4763]: I1206 08:27:14.417050 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tptcx" event={"ID":"80225f03-b4d5-470c-9bd0-bf1ec8e74583","Type":"ContainerStarted","Data":"9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7"} Dec 06 08:27:14 crc kubenswrapper[4763]: I1206 08:27:14.418465 4763 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" event={"ID":"3e707b64-79d0-4401-9401-a80ed24a9658","Type":"ContainerStarted","Data":"a980ee45a9d4d6472e3b788ee2488da66ca798e5e7025f4e479cfffb1855ff5e"} Dec 06 08:27:14 crc kubenswrapper[4763]: I1206 08:27:14.418641 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:27:14 crc kubenswrapper[4763]: I1206 08:27:14.420018 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" event={"ID":"f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c","Type":"ContainerStarted","Data":"6be2322036258714c6d27a309a2a1d79a2e16ed3db97f0f41c1efea03c765065"} Dec 06 08:27:14 crc kubenswrapper[4763]: I1206 08:27:14.420302 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:27:14 crc kubenswrapper[4763]: I1206 08:27:14.434424 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tptcx" podStartSLOduration=3.399273382 podStartE2EDuration="7.434408295s" podCreationTimestamp="2025-12-06 08:27:07 +0000 UTC" firstStartedPulling="2025-12-06 08:27:09.336440856 +0000 UTC m=+911.912145884" lastFinishedPulling="2025-12-06 08:27:13.371575759 +0000 UTC m=+915.947280797" observedRunningTime="2025-12-06 08:27:14.432567455 +0000 UTC m=+917.008272503" watchObservedRunningTime="2025-12-06 08:27:14.434408295 +0000 UTC m=+917.010113333" Dec 06 08:27:14 crc kubenswrapper[4763]: I1206 08:27:14.453335 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" podStartSLOduration=30.092002638 podStartE2EDuration="1m5.453314494s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:38.803573552 +0000 UTC m=+881.379278580" lastFinishedPulling="2025-12-06 08:27:14.164885398 +0000 UTC m=+916.740590436" observedRunningTime="2025-12-06 08:27:14.449742768 +0000 UTC m=+917.025447806" watchObservedRunningTime="2025-12-06 08:27:14.453314494 +0000 UTC m=+917.029019532" Dec 06 08:27:14 crc kubenswrapper[4763]: I1206 08:27:14.477616 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" podStartSLOduration=29.555459612 podStartE2EDuration="1m5.477592328s" podCreationTimestamp="2025-12-06 08:26:09 +0000 UTC" firstStartedPulling="2025-12-06 08:26:38.245030964 +0000 UTC m=+880.820735992" lastFinishedPulling="2025-12-06 08:27:14.16716367 +0000 UTC m=+916.742868708" observedRunningTime="2025-12-06 08:27:14.474410462 +0000 UTC m=+917.050115500" watchObservedRunningTime="2025-12-06 08:27:14.477592328 +0000 UTC m=+917.053297366" Dec 06 08:27:15 crc kubenswrapper[4763]: I1206 08:27:15.816557 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9ffvq"] Dec 06 08:27:16 crc kubenswrapper[4763]: I1206 08:27:16.432002 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9ffvq" podUID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" containerName="registry-server" containerID="cri-o://dfd89e0d74e0af8f41b3e6ae7698d0e5e54c3d33909a2b2a271ba4b394c82f19" gracePeriod=2 Dec 06 08:27:17 crc 
kubenswrapper[4763]: I1206 08:27:17.440195 4763 generic.go:334] "Generic (PLEG): container finished" podID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" containerID="dfd89e0d74e0af8f41b3e6ae7698d0e5e54c3d33909a2b2a271ba4b394c82f19" exitCode=0 Dec 06 08:27:17 crc kubenswrapper[4763]: I1206 08:27:17.440245 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ffvq" event={"ID":"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226","Type":"ContainerDied","Data":"dfd89e0d74e0af8f41b3e6ae7698d0e5e54c3d33909a2b2a271ba4b394c82f19"} Dec 06 08:27:17 crc kubenswrapper[4763]: I1206 08:27:17.744439 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:17 crc kubenswrapper[4763]: I1206 08:27:17.744845 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:17 crc kubenswrapper[4763]: I1206 08:27:17.791635 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:17 crc kubenswrapper[4763]: I1206 08:27:17.972315 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.078603 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-catalog-content\") pod \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.078654 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8v9hx\" (UniqueName: \"kubernetes.io/projected/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-kube-api-access-8v9hx\") pod \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.078693 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-utilities\") pod \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\" (UID: \"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226\") " Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.080009 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-utilities" (OuterVolumeSpecName: "utilities") pod "c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" (UID: "c27cc0d8-ed3e-437a-ac48-7a44cd4fd226"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.090833 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-kube-api-access-8v9hx" (OuterVolumeSpecName: "kube-api-access-8v9hx") pod "c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" (UID: "c27cc0d8-ed3e-437a-ac48-7a44cd4fd226"). InnerVolumeSpecName "kube-api-access-8v9hx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.128918 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" (UID: "c27cc0d8-ed3e-437a-ac48-7a44cd4fd226"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.180000 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.180040 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8v9hx\" (UniqueName: \"kubernetes.io/projected/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-kube-api-access-8v9hx\") on node \"crc\" DevicePath \"\"" Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.180051 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.450102 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ffvq" event={"ID":"c27cc0d8-ed3e-437a-ac48-7a44cd4fd226","Type":"ContainerDied","Data":"fb80319c6ea12375e0c7046e87669e40cc4f7e11d0d9f3ad1d0b16fc0e9709ed"} Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.450132 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ffvq" Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.450156 4763 scope.go:117] "RemoveContainer" containerID="dfd89e0d74e0af8f41b3e6ae7698d0e5e54c3d33909a2b2a271ba4b394c82f19" Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.469131 4763 scope.go:117] "RemoveContainer" containerID="889f741516d72ab46f4a4c690ed29a4bd2d56cf62b728226a50ae962bd2eef2d" Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.486984 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9ffvq"] Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.493877 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9ffvq"] Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.497436 4763 scope.go:117] "RemoveContainer" containerID="36c3bbf6a39d1f1a7aec314e26a2b9339a16d5b4d679008deae79f9c2196c39f" Dec 06 08:27:18 crc kubenswrapper[4763]: I1206 08:27:18.499844 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:19 crc kubenswrapper[4763]: I1206 08:27:19.735259 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" path="/var/lib/kubelet/pods/c27cc0d8-ed3e-437a-ac48-7a44cd4fd226/volumes" Dec 06 08:27:20 crc kubenswrapper[4763]: I1206 08:27:20.217880 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tptcx"] Dec 06 08:27:20 crc kubenswrapper[4763]: I1206 08:27:20.465168 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tptcx" 
podUID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" containerName="registry-server" containerID="cri-o://9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7" gracePeriod=2 Dec 06 08:27:20 crc kubenswrapper[4763]: I1206 08:27:20.853227 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:20 crc kubenswrapper[4763]: E1206 08:27:20.955828 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.015597 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-utilities\") pod \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\" (UID: \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.015756 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hdgf\" (UniqueName: \"kubernetes.io/projected/80225f03-b4d5-470c-9bd0-bf1ec8e74583-kube-api-access-8hdgf\") pod \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\" (UID: \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.015786 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-catalog-content\") pod \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\" (UID: \"80225f03-b4d5-470c-9bd0-bf1ec8e74583\") " Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.016663 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-utilities" (OuterVolumeSpecName: "utilities") pod "80225f03-b4d5-470c-9bd0-bf1ec8e74583" (UID: "80225f03-b4d5-470c-9bd0-bf1ec8e74583"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.022099 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80225f03-b4d5-470c-9bd0-bf1ec8e74583-kube-api-access-8hdgf" (OuterVolumeSpecName: "kube-api-access-8hdgf") pod "80225f03-b4d5-470c-9bd0-bf1ec8e74583" (UID: "80225f03-b4d5-470c-9bd0-bf1ec8e74583"). InnerVolumeSpecName "kube-api-access-8hdgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.068310 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "80225f03-b4d5-470c-9bd0-bf1ec8e74583" (UID: "80225f03-b4d5-470c-9bd0-bf1ec8e74583"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.117843 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hdgf\" (UniqueName: \"kubernetes.io/projected/80225f03-b4d5-470c-9bd0-bf1ec8e74583-kube-api-access-8hdgf\") on node \"crc\" DevicePath \"\"" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.117888 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.117924 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80225f03-b4d5-470c-9bd0-bf1ec8e74583-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.475188 4763 generic.go:334] "Generic (PLEG): container finished" podID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" containerID="9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7" exitCode=0 Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.475235 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tptcx" event={"ID":"80225f03-b4d5-470c-9bd0-bf1ec8e74583","Type":"ContainerDied","Data":"9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7"} Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.475266 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tptcx" event={"ID":"80225f03-b4d5-470c-9bd0-bf1ec8e74583","Type":"ContainerDied","Data":"3534a05a2534a760db9f9609e89cf2095b4e295eb16dadcd42baa3b910a61dc3"} Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.475286 4763 scope.go:117] "RemoveContainer" containerID="9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.475428 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tptcx" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.511350 4763 scope.go:117] "RemoveContainer" containerID="40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.512062 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tptcx"] Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.517275 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tptcx"] Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.538275 4763 scope.go:117] "RemoveContainer" containerID="596733269ed396ab105025f56ed4ee5e401234386cd9570e39ef1256dff4f765" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.586198 4763 scope.go:117] "RemoveContainer" containerID="9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7" Dec 06 08:27:21 crc kubenswrapper[4763]: E1206 08:27:21.586638 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7\": container with ID starting with 9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7 not found: ID does not exist" containerID="9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.586665 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7"} err="failed to get container status \"9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7\": rpc error: code = NotFound desc = could not find container \"9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7\": container with ID starting with 9422a198a2c05da1a1ddb5372830dc37d4b2bf629601a41805cf68e3c013cfd7 not found: ID does not exist" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.586684 4763 scope.go:117] "RemoveContainer" containerID="40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478" Dec 06 08:27:21 crc kubenswrapper[4763]: E1206 08:27:21.587030 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478\": container with ID starting with 40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478 not found: ID does not exist" containerID="40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.587059 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478"} err="failed to get container status \"40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478\": rpc error: code = NotFound desc = could not find container \"40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478\": container with ID starting with 40b44db6a4d1d508c327e68c7bb885d8ee74e88c3cd5d5c3c688cc8f2652e478 not found: ID does not exist" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.587086 4763 scope.go:117] "RemoveContainer" containerID="596733269ed396ab105025f56ed4ee5e401234386cd9570e39ef1256dff4f765" Dec 06 08:27:21 crc kubenswrapper[4763]: E1206 08:27:21.587315 4763 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"596733269ed396ab105025f56ed4ee5e401234386cd9570e39ef1256dff4f765\": container with ID starting with 596733269ed396ab105025f56ed4ee5e401234386cd9570e39ef1256dff4f765 not found: ID does not exist" containerID="596733269ed396ab105025f56ed4ee5e401234386cd9570e39ef1256dff4f765" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.587338 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"596733269ed396ab105025f56ed4ee5e401234386cd9570e39ef1256dff4f765"} err="failed to get container status \"596733269ed396ab105025f56ed4ee5e401234386cd9570e39ef1256dff4f765\": rpc error: code = NotFound desc = could not find container \"596733269ed396ab105025f56ed4ee5e401234386cd9570e39ef1256dff4f765\": container with ID starting with 596733269ed396ab105025f56ed4ee5e401234386cd9570e39ef1256dff4f765 not found: ID does not exist" Dec 06 08:27:21 crc kubenswrapper[4763]: I1206 08:27:21.731521 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" path="/var/lib/kubelet/pods/80225f03-b4d5-470c-9bd0-bf1ec8e74583/volumes" Dec 06 08:27:25 crc kubenswrapper[4763]: I1206 08:27:25.489669 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-kj44v" Dec 06 08:27:25 crc kubenswrapper[4763]: I1206 08:27:25.934756 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.536437 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.537046 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.752425 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bbff789c7-257zl"] Dec 06 08:27:42 crc kubenswrapper[4763]: E1206 08:27:42.752767 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e50325ec-2cee-48c6-966a-26a08a87806e" containerName="extract-utilities" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.752788 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e50325ec-2cee-48c6-966a-26a08a87806e" containerName="extract-utilities" Dec 06 08:27:42 crc kubenswrapper[4763]: E1206 08:27:42.752821 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" containerName="extract-utilities" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.752829 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" containerName="extract-utilities" Dec 06 08:27:42 crc kubenswrapper[4763]: E1206 08:27:42.752856 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" containerName="registry-server" Dec 06 
08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.752867 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" containerName="registry-server" Dec 06 08:27:42 crc kubenswrapper[4763]: E1206 08:27:42.752883 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" containerName="registry-server" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.752986 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" containerName="registry-server" Dec 06 08:27:42 crc kubenswrapper[4763]: E1206 08:27:42.753003 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" containerName="extract-content" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.753011 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" containerName="extract-content" Dec 06 08:27:42 crc kubenswrapper[4763]: E1206 08:27:42.753029 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" containerName="extract-content" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.753036 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" containerName="extract-content" Dec 06 08:27:42 crc kubenswrapper[4763]: E1206 08:27:42.753047 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" containerName="extract-utilities" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.753054 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" containerName="extract-utilities" Dec 06 08:27:42 crc kubenswrapper[4763]: E1206 08:27:42.753068 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e50325ec-2cee-48c6-966a-26a08a87806e" containerName="registry-server" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.753075 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e50325ec-2cee-48c6-966a-26a08a87806e" containerName="registry-server" Dec 06 08:27:42 crc kubenswrapper[4763]: E1206 08:27:42.753088 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e50325ec-2cee-48c6-966a-26a08a87806e" containerName="extract-content" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.753095 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e50325ec-2cee-48c6-966a-26a08a87806e" containerName="extract-content" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.753280 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e50325ec-2cee-48c6-966a-26a08a87806e" containerName="registry-server" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.753298 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="c27cc0d8-ed3e-437a-ac48-7a44cd4fd226" containerName="registry-server" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.753309 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="80225f03-b4d5-470c-9bd0-bf1ec8e74583" containerName="registry-server" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.754269 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bbff789c7-257zl" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.764117 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.765294 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.765396 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-rqjxb" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.765462 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.768924 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bbff789c7-257zl"] Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.829362 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f78fc7c85-ggz7g"] Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.832778 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.835575 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.838539 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f78fc7c85-ggz7g"] Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.902114 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4cnw\" (UniqueName: \"kubernetes.io/projected/862af203-fa4c-4c9c-a509-3ece19348075-kube-api-access-b4cnw\") pod \"dnsmasq-dns-6f78fc7c85-ggz7g\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.902176 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8lxs\" (UniqueName: \"kubernetes.io/projected/daff51c8-c909-418f-89c8-a3db427d82be-kube-api-access-x8lxs\") pod \"dnsmasq-dns-5bbff789c7-257zl\" (UID: \"daff51c8-c909-418f-89c8-a3db427d82be\") " pod="openstack/dnsmasq-dns-5bbff789c7-257zl" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.902234 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daff51c8-c909-418f-89c8-a3db427d82be-config\") pod \"dnsmasq-dns-5bbff789c7-257zl\" (UID: \"daff51c8-c909-418f-89c8-a3db427d82be\") " pod="openstack/dnsmasq-dns-5bbff789c7-257zl" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.902256 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-dns-svc\") pod \"dnsmasq-dns-6f78fc7c85-ggz7g\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:27:42 crc kubenswrapper[4763]: I1206 08:27:42.902337 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-config\") pod \"dnsmasq-dns-6f78fc7c85-ggz7g\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " 
pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.003786 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daff51c8-c909-418f-89c8-a3db427d82be-config\") pod \"dnsmasq-dns-5bbff789c7-257zl\" (UID: \"daff51c8-c909-418f-89c8-a3db427d82be\") " pod="openstack/dnsmasq-dns-5bbff789c7-257zl" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.003843 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-dns-svc\") pod \"dnsmasq-dns-6f78fc7c85-ggz7g\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.003930 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-config\") pod \"dnsmasq-dns-6f78fc7c85-ggz7g\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.003966 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4cnw\" (UniqueName: \"kubernetes.io/projected/862af203-fa4c-4c9c-a509-3ece19348075-kube-api-access-b4cnw\") pod \"dnsmasq-dns-6f78fc7c85-ggz7g\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.003992 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8lxs\" (UniqueName: \"kubernetes.io/projected/daff51c8-c909-418f-89c8-a3db427d82be-kube-api-access-x8lxs\") pod \"dnsmasq-dns-5bbff789c7-257zl\" (UID: \"daff51c8-c909-418f-89c8-a3db427d82be\") " pod="openstack/dnsmasq-dns-5bbff789c7-257zl" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.005012 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-dns-svc\") pod \"dnsmasq-dns-6f78fc7c85-ggz7g\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.005020 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daff51c8-c909-418f-89c8-a3db427d82be-config\") pod \"dnsmasq-dns-5bbff789c7-257zl\" (UID: \"daff51c8-c909-418f-89c8-a3db427d82be\") " pod="openstack/dnsmasq-dns-5bbff789c7-257zl" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.005023 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-config\") pod \"dnsmasq-dns-6f78fc7c85-ggz7g\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.021784 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4cnw\" (UniqueName: \"kubernetes.io/projected/862af203-fa4c-4c9c-a509-3ece19348075-kube-api-access-b4cnw\") pod \"dnsmasq-dns-6f78fc7c85-ggz7g\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.021784 4763 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8lxs\" (UniqueName: \"kubernetes.io/projected/daff51c8-c909-418f-89c8-a3db427d82be-kube-api-access-x8lxs\") pod \"dnsmasq-dns-5bbff789c7-257zl\" (UID: \"daff51c8-c909-418f-89c8-a3db427d82be\") " pod="openstack/dnsmasq-dns-5bbff789c7-257zl" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.079124 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bbff789c7-257zl" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.150825 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.460344 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f78fc7c85-ggz7g"] Dec 06 08:27:43 crc kubenswrapper[4763]: W1206 08:27:43.461667 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod862af203_fa4c_4c9c_a509_3ece19348075.slice/crio-376f59049462a67d06f968aebee2f5c3b4dd7b0afd6877098e75f9981c1bc4c3 WatchSource:0}: Error finding container 376f59049462a67d06f968aebee2f5c3b4dd7b0afd6877098e75f9981c1bc4c3: Status 404 returned error can't find the container with id 376f59049462a67d06f968aebee2f5c3b4dd7b0afd6877098e75f9981c1bc4c3 Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.552595 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bbff789c7-257zl"] Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.633951 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bbff789c7-257zl" event={"ID":"daff51c8-c909-418f-89c8-a3db427d82be","Type":"ContainerStarted","Data":"39a7fcd8d24890ebaa8f5e938bba0244241f8e5a24d3f854ba364328df86d5ad"} Dec 06 08:27:43 crc kubenswrapper[4763]: I1206 08:27:43.636296 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" event={"ID":"862af203-fa4c-4c9c-a509-3ece19348075","Type":"ContainerStarted","Data":"376f59049462a67d06f968aebee2f5c3b4dd7b0afd6877098e75f9981c1bc4c3"} Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.098384 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bbff789c7-257zl"] Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.124214 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c768d6f65-9zlj6"] Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.126283 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.132662 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c768d6f65-9zlj6"] Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.279317 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wqh4\" (UniqueName: \"kubernetes.io/projected/a1653ba8-d12f-4539-8542-a67380587248-kube-api-access-4wqh4\") pod \"dnsmasq-dns-6c768d6f65-9zlj6\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.279417 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-config\") pod \"dnsmasq-dns-6c768d6f65-9zlj6\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.279473 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-dns-svc\") pod \"dnsmasq-dns-6c768d6f65-9zlj6\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.380782 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-config\") pod \"dnsmasq-dns-6c768d6f65-9zlj6\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.380865 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-dns-svc\") pod \"dnsmasq-dns-6c768d6f65-9zlj6\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.380966 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wqh4\" (UniqueName: \"kubernetes.io/projected/a1653ba8-d12f-4539-8542-a67380587248-kube-api-access-4wqh4\") pod \"dnsmasq-dns-6c768d6f65-9zlj6\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.382724 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-dns-svc\") pod \"dnsmasq-dns-6c768d6f65-9zlj6\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.383306 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-config\") pod \"dnsmasq-dns-6c768d6f65-9zlj6\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.395043 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f78fc7c85-ggz7g"] Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.407209 
4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wqh4\" (UniqueName: \"kubernetes.io/projected/a1653ba8-d12f-4539-8542-a67380587248-kube-api-access-4wqh4\") pod \"dnsmasq-dns-6c768d6f65-9zlj6\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.431106 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fd5849d85-rzts8"] Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.432961 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.443054 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fd5849d85-rzts8"] Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.452918 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.481739 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-config\") pod \"dnsmasq-dns-fd5849d85-rzts8\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.481779 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmb2d\" (UniqueName: \"kubernetes.io/projected/fcff0e31-9690-4020-ae0e-fed5a80ccd49-kube-api-access-kmb2d\") pod \"dnsmasq-dns-fd5849d85-rzts8\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.482133 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-dns-svc\") pod \"dnsmasq-dns-fd5849d85-rzts8\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.584565 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-dns-svc\") pod \"dnsmasq-dns-fd5849d85-rzts8\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.584651 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-config\") pod \"dnsmasq-dns-fd5849d85-rzts8\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.584682 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmb2d\" (UniqueName: \"kubernetes.io/projected/fcff0e31-9690-4020-ae0e-fed5a80ccd49-kube-api-access-kmb2d\") pod \"dnsmasq-dns-fd5849d85-rzts8\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.585588 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-config\") pod \"dnsmasq-dns-fd5849d85-rzts8\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.585603 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-dns-svc\") pod \"dnsmasq-dns-fd5849d85-rzts8\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.603254 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmb2d\" (UniqueName: \"kubernetes.io/projected/fcff0e31-9690-4020-ae0e-fed5a80ccd49-kube-api-access-kmb2d\") pod \"dnsmasq-dns-fd5849d85-rzts8\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.741621 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c768d6f65-9zlj6"] Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.750764 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.791657 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fc86f595f-rr6mx"] Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.793300 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.797963 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fc86f595f-rr6mx"] Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.889080 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjfln\" (UniqueName: \"kubernetes.io/projected/293062e3-bba3-4bb9-a750-586c7285d5b1-kube-api-access-fjfln\") pod \"dnsmasq-dns-7fc86f595f-rr6mx\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.889129 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-config\") pod \"dnsmasq-dns-7fc86f595f-rr6mx\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.889204 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-dns-svc\") pod \"dnsmasq-dns-7fc86f595f-rr6mx\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.990210 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-dns-svc\") pod \"dnsmasq-dns-7fc86f595f-rr6mx\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.990288 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-fjfln\" (UniqueName: \"kubernetes.io/projected/293062e3-bba3-4bb9-a750-586c7285d5b1-kube-api-access-fjfln\") pod \"dnsmasq-dns-7fc86f595f-rr6mx\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.990310 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-config\") pod \"dnsmasq-dns-7fc86f595f-rr6mx\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.991110 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-config\") pod \"dnsmasq-dns-7fc86f595f-rr6mx\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:27:47 crc kubenswrapper[4763]: I1206 08:27:47.992508 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-dns-svc\") pod \"dnsmasq-dns-7fc86f595f-rr6mx\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.010806 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjfln\" (UniqueName: \"kubernetes.io/projected/293062e3-bba3-4bb9-a750-586c7285d5b1-kube-api-access-fjfln\") pod \"dnsmasq-dns-7fc86f595f-rr6mx\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.120117 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.243806 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.245149 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.247303 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-default-user" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.247661 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-notifications-svc" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.247737 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-erlang-cookie" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.251915 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-server-conf" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.252149 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-server-dockercfg-pkr8v" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.252303 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-config-data" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.252700 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-plugins-conf" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.261255 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.293328 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e18a4dfa-5953-422a-be11-7ae83ab5ec09-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.293395 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zc6t\" (UniqueName: \"kubernetes.io/projected/e18a4dfa-5953-422a-be11-7ae83ab5ec09-kube-api-access-4zc6t\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.293516 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e18a4dfa-5953-422a-be11-7ae83ab5ec09-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.293622 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.293644 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " 
pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.293678 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.293740 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e18a4dfa-5953-422a-be11-7ae83ab5ec09-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.293821 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.293850 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e18a4dfa-5953-422a-be11-7ae83ab5ec09-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.293876 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.293955 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e18a4dfa-5953-422a-be11-7ae83ab5ec09-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.395418 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.395478 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e18a4dfa-5953-422a-be11-7ae83ab5ec09-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.395511 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.395538 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e18a4dfa-5953-422a-be11-7ae83ab5ec09-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.395594 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e18a4dfa-5953-422a-be11-7ae83ab5ec09-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.395624 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zc6t\" (UniqueName: \"kubernetes.io/projected/e18a4dfa-5953-422a-be11-7ae83ab5ec09-kube-api-access-4zc6t\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.395654 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e18a4dfa-5953-422a-be11-7ae83ab5ec09-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.395701 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.395726 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.395756 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.395798 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e18a4dfa-5953-422a-be11-7ae83ab5ec09-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.396943 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/e18a4dfa-5953-422a-be11-7ae83ab5ec09-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.397227 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.397785 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.398084 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.398530 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e18a4dfa-5953-422a-be11-7ae83ab5ec09-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.399425 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e18a4dfa-5953-422a-be11-7ae83ab5ec09-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.400876 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e18a4dfa-5953-422a-be11-7ae83ab5ec09-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.402145 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.402989 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e18a4dfa-5953-422a-be11-7ae83ab5ec09-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.408188 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/e18a4dfa-5953-422a-be11-7ae83ab5ec09-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.412377 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zc6t\" (UniqueName: \"kubernetes.io/projected/e18a4dfa-5953-422a-be11-7ae83ab5ec09-kube-api-access-4zc6t\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.419839 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"e18a4dfa-5953-422a-be11-7ae83ab5ec09\") " pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.531851 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.533532 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.536057 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.536297 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.536854 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-nhvp8" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.537290 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.537323 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.537374 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.537857 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.550919 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.583379 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.603212 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.603269 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.603285 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.603351 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.603383 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/95ce87d2-e5c0-41f4-948a-e78e26077c91-pod-info\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.603407 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-server-conf\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.603449 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/95ce87d2-e5c0-41f4-948a-e78e26077c91-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.603473 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.603487 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqdsb\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-kube-api-access-nqdsb\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 
crc kubenswrapper[4763]: I1206 08:27:48.603526 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-config-data\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.603548 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.705139 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.705202 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.705250 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.705935 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.705281 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/95ce87d2-e5c0-41f4-948a-e78e26077c91-pod-info\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.706093 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-server-conf\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.706158 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/95ce87d2-e5c0-41f4-948a-e78e26077c91-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.706264 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.706317 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqdsb\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-kube-api-access-nqdsb\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.706363 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-config-data\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.706397 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.706513 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.707184 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-server-conf\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.707349 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.707619 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.708417 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.709050 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-config-data\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.709529 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.716483 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/95ce87d2-e5c0-41f4-948a-e78e26077c91-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.720252 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.722637 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/95ce87d2-e5c0-41f4-948a-e78e26077c91-pod-info\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.724887 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqdsb\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-kube-api-access-nqdsb\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.728960 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.872494 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.892495 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.895853 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.906082 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.906629 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.906994 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.907072 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-l7674" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.907294 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.907407 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.907421 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 06 08:27:48 crc kubenswrapper[4763]: I1206 08:27:48.907504 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.011831 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.011886 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.011921 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.011955 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.011992 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.012039 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.012114 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.012142 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.012345 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.012496 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlvjq\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-kube-api-access-vlvjq\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.012629 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.113923 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.113981 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlvjq\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-kube-api-access-vlvjq\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114001 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114039 4763 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114058 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114073 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114098 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114126 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114144 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114168 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114190 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114477 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114605 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.114644 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.115189 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.116052 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.117799 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.124498 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.125012 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.125812 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.136514 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.140485 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlvjq\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-kube-api-access-vlvjq\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 
08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.141189 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:49 crc kubenswrapper[4763]: I1206 08:27:49.239357 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.640368 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.642422 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.646056 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.646361 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.647105 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.648794 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-flw4j" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.653489 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.656795 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.741008 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.741080 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw5jm\" (UniqueName: \"kubernetes.io/projected/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-kube-api-access-bw5jm\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.741162 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.741191 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-kolla-config\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.741246 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.741277 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.741321 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.741344 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-config-data-default\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.842983 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.843039 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.843079 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-config-data-default\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.843096 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.843117 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.843145 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw5jm\" (UniqueName: 
\"kubernetes.io/projected/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-kube-api-access-bw5jm\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.843199 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.843247 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.843760 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-kolla-config\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.844762 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-kolla-config\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.844081 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.855044 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.904099 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.904558 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw5jm\" (UniqueName: \"kubernetes.io/projected/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-kube-api-access-bw5jm\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.908243 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-config-data-default\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc 
kubenswrapper[4763]: I1206 08:27:50.932173 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de1b0280-c39f-4e3d-98b9-cdbb0085e6e1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.940337 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1\") " pod="openstack/openstack-galera-0" Dec 06 08:27:50 crc kubenswrapper[4763]: I1206 08:27:50.969803 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.163318 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.164761 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.166767 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-c8487" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.167470 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.167485 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.168441 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.186589 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.280307 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d74kc\" (UniqueName: \"kubernetes.io/projected/00c834db-e265-44e2-9915-2be0931014a5-kube-api-access-d74kc\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.280379 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00c834db-e265-44e2-9915-2be0931014a5-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.280526 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/00c834db-e265-44e2-9915-2be0931014a5-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.280601 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/00c834db-e265-44e2-9915-2be0931014a5-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.280695 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00c834db-e265-44e2-9915-2be0931014a5-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.280747 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/00c834db-e265-44e2-9915-2be0931014a5-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.280807 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.280834 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/00c834db-e265-44e2-9915-2be0931014a5-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.313682 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.314871 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.318649 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.319057 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-4m48z" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.319221 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.331778 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.381811 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b699a6d2-a0ce-4be7-9173-524d485cbd89-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.381866 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/00c834db-e265-44e2-9915-2be0931014a5-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.381970 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8rj6\" (UniqueName: \"kubernetes.io/projected/b699a6d2-a0ce-4be7-9173-524d485cbd89-kube-api-access-q8rj6\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.381999 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b699a6d2-a0ce-4be7-9173-524d485cbd89-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382057 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00c834db-e265-44e2-9915-2be0931014a5-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382131 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/00c834db-e265-44e2-9915-2be0931014a5-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382172 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382195 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" 
(UniqueName: \"kubernetes.io/configmap/b699a6d2-a0ce-4be7-9173-524d485cbd89-kolla-config\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382223 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/00c834db-e265-44e2-9915-2be0931014a5-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382238 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b699a6d2-a0ce-4be7-9173-524d485cbd89-config-data\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382336 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d74kc\" (UniqueName: \"kubernetes.io/projected/00c834db-e265-44e2-9915-2be0931014a5-kube-api-access-d74kc\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382376 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00c834db-e265-44e2-9915-2be0931014a5-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382454 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/00c834db-e265-44e2-9915-2be0931014a5-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382627 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/00c834db-e265-44e2-9915-2be0931014a5-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382648 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.382765 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/00c834db-e265-44e2-9915-2be0931014a5-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.383715 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/00c834db-e265-44e2-9915-2be0931014a5-config-data-default\") pod 
\"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.383853 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00c834db-e265-44e2-9915-2be0931014a5-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.396729 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00c834db-e265-44e2-9915-2be0931014a5-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.405005 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/00c834db-e265-44e2-9915-2be0931014a5-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.416336 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.420386 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d74kc\" (UniqueName: \"kubernetes.io/projected/00c834db-e265-44e2-9915-2be0931014a5-kube-api-access-d74kc\") pod \"openstack-cell1-galera-0\" (UID: \"00c834db-e265-44e2-9915-2be0931014a5\") " pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.483991 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b699a6d2-a0ce-4be7-9173-524d485cbd89-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.484074 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8rj6\" (UniqueName: \"kubernetes.io/projected/b699a6d2-a0ce-4be7-9173-524d485cbd89-kube-api-access-q8rj6\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.484098 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b699a6d2-a0ce-4be7-9173-524d485cbd89-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.484155 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b699a6d2-a0ce-4be7-9173-524d485cbd89-kolla-config\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.484181 4763 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b699a6d2-a0ce-4be7-9173-524d485cbd89-config-data\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.485175 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b699a6d2-a0ce-4be7-9173-524d485cbd89-config-data\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.485277 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b699a6d2-a0ce-4be7-9173-524d485cbd89-kolla-config\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.485782 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.491288 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b699a6d2-a0ce-4be7-9173-524d485cbd89-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.497005 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b699a6d2-a0ce-4be7-9173-524d485cbd89-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.504958 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8rj6\" (UniqueName: \"kubernetes.io/projected/b699a6d2-a0ce-4be7-9173-524d485cbd89-kube-api-access-q8rj6\") pod \"memcached-0\" (UID: \"b699a6d2-a0ce-4be7-9173-524d485cbd89\") " pod="openstack/memcached-0" Dec 06 08:27:52 crc kubenswrapper[4763]: I1206 08:27:52.670762 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 06 08:27:54 crc kubenswrapper[4763]: I1206 08:27:54.655303 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 06 08:27:54 crc kubenswrapper[4763]: I1206 08:27:54.656702 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 06 08:27:54 crc kubenswrapper[4763]: I1206 08:27:54.658608 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-tc88c" Dec 06 08:27:54 crc kubenswrapper[4763]: I1206 08:27:54.665442 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 06 08:27:54 crc kubenswrapper[4763]: I1206 08:27:54.858843 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vqk7\" (UniqueName: \"kubernetes.io/projected/4260e0fd-067b-4a47-8a55-0514868766aa-kube-api-access-8vqk7\") pod \"kube-state-metrics-0\" (UID: \"4260e0fd-067b-4a47-8a55-0514868766aa\") " pod="openstack/kube-state-metrics-0" Dec 06 08:27:54 crc kubenswrapper[4763]: I1206 08:27:54.961592 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vqk7\" (UniqueName: \"kubernetes.io/projected/4260e0fd-067b-4a47-8a55-0514868766aa-kube-api-access-8vqk7\") pod \"kube-state-metrics-0\" (UID: \"4260e0fd-067b-4a47-8a55-0514868766aa\") " pod="openstack/kube-state-metrics-0" Dec 06 08:27:55 crc kubenswrapper[4763]: I1206 08:27:55.009119 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vqk7\" (UniqueName: \"kubernetes.io/projected/4260e0fd-067b-4a47-8a55-0514868766aa-kube-api-access-8vqk7\") pod \"kube-state-metrics-0\" (UID: \"4260e0fd-067b-4a47-8a55-0514868766aa\") " pod="openstack/kube-state-metrics-0" Dec 06 08:27:55 crc kubenswrapper[4763]: I1206 08:27:55.067517 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.154005 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.155782 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.158642 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.161990 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.162474 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.162618 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.163420 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-b6l5c" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.169382 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.177311 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.282055 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d80731d8-91ee-438d-a5c8-da9d36005e6f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.282522 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-config\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.282560 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d80731d8-91ee-438d-a5c8-da9d36005e6f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.282586 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wgcp\" (UniqueName: \"kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-kube-api-access-6wgcp\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.282609 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.282638 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.282663 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.282685 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.383662 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d80731d8-91ee-438d-a5c8-da9d36005e6f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.383763 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-config\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.383798 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d80731d8-91ee-438d-a5c8-da9d36005e6f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.383824 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wgcp\" (UniqueName: \"kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-kube-api-access-6wgcp\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.383844 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.383877 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " 
pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.383921 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.383941 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.385849 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d80731d8-91ee-438d-a5c8-da9d36005e6f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.387313 4763 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.387350 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b015cf8d820501a423550d1415408204b77e53ec4d768da3cd0e5c2a5ce9ba08/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.394621 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-config\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.444027 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.452535 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d80731d8-91ee-438d-a5c8-da9d36005e6f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.452854 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " 
pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.455423 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wgcp\" (UniqueName: \"kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-kube-api-access-6wgcp\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.456287 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.457448 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.496550 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 06 08:27:56 crc kubenswrapper[4763]: I1206 08:27:56.656655 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fd5849d85-rzts8"] Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.837985 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-mvnv6"] Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.843429 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.853310 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-845xw" Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.853361 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.853628 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.863382 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mvnv6"] Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.873243 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-hqktq"] Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.893165 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.897738 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-hqktq"] Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.909367 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-var-run\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.909447 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-combined-ca-bundle\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.909534 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9jlk\" (UniqueName: \"kubernetes.io/projected/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-kube-api-access-t9jlk\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.909684 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-ovn-controller-tls-certs\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.909787 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-var-log-ovn\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.909842 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-scripts\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:57 crc kubenswrapper[4763]: I1206 08:27:57.909865 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-var-run-ovn\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011081 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-var-run\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011374 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: 
\"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-etc-ovs\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011418 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-combined-ca-bundle\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011436 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9jlk\" (UniqueName: \"kubernetes.io/projected/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-kube-api-access-t9jlk\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011463 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-var-lib\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011492 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-ovn-controller-tls-certs\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011518 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-var-run\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011541 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-var-log-ovn\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011557 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-var-log\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011578 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-var-run-ovn\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011591 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-scripts\") pod \"ovn-controller-mvnv6\" (UID: 
\"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011610 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8lgs\" (UniqueName: \"kubernetes.io/projected/3afc31ec-e08a-4564-afb9-dda5f891cb5c-kube-api-access-w8lgs\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.011641 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3afc31ec-e08a-4564-afb9-dda5f891cb5c-scripts\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.012114 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-var-run\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.012773 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-var-log-ovn\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.015101 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-var-run-ovn\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.017342 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-scripts\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.023384 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-combined-ca-bundle\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.030363 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9jlk\" (UniqueName: \"kubernetes.io/projected/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-kube-api-access-t9jlk\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.033088 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd10dfb-5dd9-4271-94aa-60b8fed4ba2b-ovn-controller-tls-certs\") pod \"ovn-controller-mvnv6\" (UID: \"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b\") " pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.141096 4763 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-etc-ovs\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.141205 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-var-lib\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.141262 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-var-run\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.141295 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-var-log\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.141331 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8lgs\" (UniqueName: \"kubernetes.io/projected/3afc31ec-e08a-4564-afb9-dda5f891cb5c-kube-api-access-w8lgs\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.141377 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3afc31ec-e08a-4564-afb9-dda5f891cb5c-scripts\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.141655 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-var-run\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.141878 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-etc-ovs\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.141986 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-var-log\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.142029 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3afc31ec-e08a-4564-afb9-dda5f891cb5c-var-lib\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " 
pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.143831 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3afc31ec-e08a-4564-afb9-dda5f891cb5c-scripts\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.163732 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8lgs\" (UniqueName: \"kubernetes.io/projected/3afc31ec-e08a-4564-afb9-dda5f891cb5c-kube-api-access-w8lgs\") pod \"ovn-controller-ovs-hqktq\" (UID: \"3afc31ec-e08a-4564-afb9-dda5f891cb5c\") " pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.172675 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mvnv6" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.208477 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.701519 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.703248 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.705644 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-kk9f8" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.705789 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.707766 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.708157 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.708325 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.715134 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.853310 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/678168e1-cecc-486d-b2eb-366c90a302c5-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.853371 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/678168e1-cecc-486d-b2eb-366c90a302c5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.853424 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fpvn\" (UniqueName: 
\"kubernetes.io/projected/678168e1-cecc-486d-b2eb-366c90a302c5-kube-api-access-2fpvn\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.853442 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/678168e1-cecc-486d-b2eb-366c90a302c5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.853473 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/678168e1-cecc-486d-b2eb-366c90a302c5-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.853493 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/678168e1-cecc-486d-b2eb-366c90a302c5-config\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.853561 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.853591 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/678168e1-cecc-486d-b2eb-366c90a302c5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.955359 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/678168e1-cecc-486d-b2eb-366c90a302c5-config\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.955469 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.955495 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/678168e1-cecc-486d-b2eb-366c90a302c5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.955589 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/678168e1-cecc-486d-b2eb-366c90a302c5-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 
08:27:58.955621 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/678168e1-cecc-486d-b2eb-366c90a302c5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.955662 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fpvn\" (UniqueName: \"kubernetes.io/projected/678168e1-cecc-486d-b2eb-366c90a302c5-kube-api-access-2fpvn\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.955683 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/678168e1-cecc-486d-b2eb-366c90a302c5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.955711 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/678168e1-cecc-486d-b2eb-366c90a302c5-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.960229 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.960568 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/678168e1-cecc-486d-b2eb-366c90a302c5-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.961404 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/678168e1-cecc-486d-b2eb-366c90a302c5-config\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.961603 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/678168e1-cecc-486d-b2eb-366c90a302c5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.965228 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/678168e1-cecc-486d-b2eb-366c90a302c5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.965777 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/678168e1-cecc-486d-b2eb-366c90a302c5-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: 
\"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.968082 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/678168e1-cecc-486d-b2eb-366c90a302c5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:58 crc kubenswrapper[4763]: I1206 08:27:58.986851 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fpvn\" (UniqueName: \"kubernetes.io/projected/678168e1-cecc-486d-b2eb-366c90a302c5-kube-api-access-2fpvn\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:59 crc kubenswrapper[4763]: I1206 08:27:59.002462 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"678168e1-cecc-486d-b2eb-366c90a302c5\") " pod="openstack/ovsdbserver-nb-0" Dec 06 08:27:59 crc kubenswrapper[4763]: I1206 08:27:59.041308 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.832326 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.834362 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.837805 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.837876 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.838067 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.838341 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-92tww" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.842661 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.962683 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23914339-150e-409f-bd6d-7a1c91529a22-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.963070 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23914339-150e-409f-bd6d-7a1c91529a22-config\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.963115 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/23914339-150e-409f-bd6d-7a1c91529a22-metrics-certs-tls-certs\") pod 
\"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.963134 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr2c4\" (UniqueName: \"kubernetes.io/projected/23914339-150e-409f-bd6d-7a1c91529a22-kube-api-access-pr2c4\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.963150 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.963172 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23914339-150e-409f-bd6d-7a1c91529a22-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.963202 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/23914339-150e-409f-bd6d-7a1c91529a22-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:01 crc kubenswrapper[4763]: I1206 08:28:01.963227 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/23914339-150e-409f-bd6d-7a1c91529a22-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.064819 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/23914339-150e-409f-bd6d-7a1c91529a22-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.064882 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.064953 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr2c4\" (UniqueName: \"kubernetes.io/projected/23914339-150e-409f-bd6d-7a1c91529a22-kube-api-access-pr2c4\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.064987 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23914339-150e-409f-bd6d-7a1c91529a22-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.065032 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/23914339-150e-409f-bd6d-7a1c91529a22-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.065066 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/23914339-150e-409f-bd6d-7a1c91529a22-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.065125 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23914339-150e-409f-bd6d-7a1c91529a22-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.065217 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23914339-150e-409f-bd6d-7a1c91529a22-config\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.065269 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.065862 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/23914339-150e-409f-bd6d-7a1c91529a22-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.066056 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23914339-150e-409f-bd6d-7a1c91529a22-config\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.067129 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23914339-150e-409f-bd6d-7a1c91529a22-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.070401 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/23914339-150e-409f-bd6d-7a1c91529a22-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.071581 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/23914339-150e-409f-bd6d-7a1c91529a22-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " 
pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.072421 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23914339-150e-409f-bd6d-7a1c91529a22-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.081373 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr2c4\" (UniqueName: \"kubernetes.io/projected/23914339-150e-409f-bd6d-7a1c91529a22-kube-api-access-pr2c4\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.100542 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"23914339-150e-409f-bd6d-7a1c91529a22\") " pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:02 crc kubenswrapper[4763]: I1206 08:28:02.158913 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:05 crc kubenswrapper[4763]: W1206 08:28:05.459965 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfcff0e31_9690_4020_ae0e_fed5a80ccd49.slice/crio-09aca4f56a19f5565c32584e44ef6fa8daf2528223d11db68ed232f4286387b4 WatchSource:0}: Error finding container 09aca4f56a19f5565c32584e44ef6fa8daf2528223d11db68ed232f4286387b4: Status 404 returned error can't find the container with id 09aca4f56a19f5565c32584e44ef6fa8daf2528223d11db68ed232f4286387b4 Dec 06 08:28:05 crc kubenswrapper[4763]: I1206 08:28:05.843874 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" event={"ID":"fcff0e31-9690-4020-ae0e-fed5a80ccd49","Type":"ContainerStarted","Data":"09aca4f56a19f5565c32584e44ef6fa8daf2528223d11db68ed232f4286387b4"} Dec 06 08:28:05 crc kubenswrapper[4763]: I1206 08:28:05.875164 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Dec 06 08:28:06 crc kubenswrapper[4763]: E1206 08:28:06.165957 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 06 08:28:06 crc kubenswrapper[4763]: E1206 08:28:06.166012 4763 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 06 08:28:06 crc kubenswrapper[4763]: E1206 08:28:06.166162 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.156:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x8lxs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5bbff789c7-257zl_openstack(daff51c8-c909-418f-89c8-a3db427d82be): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:28:06 crc kubenswrapper[4763]: E1206 08:28:06.167592 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5bbff789c7-257zl" podUID="daff51c8-c909-418f-89c8-a3db427d82be" Dec 06 08:28:06 crc kubenswrapper[4763]: E1206 08:28:06.193865 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 06 08:28:06 crc kubenswrapper[4763]: E1206 08:28:06.193943 4763 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Dec 06 08:28:06 crc kubenswrapper[4763]: E1206 08:28:06.194090 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.156:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b4cnw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-6f78fc7c85-ggz7g_openstack(862af203-fa4c-4c9c-a509-3ece19348075): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:28:06 crc kubenswrapper[4763]: E1206 08:28:06.195415 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" podUID="862af203-fa4c-4c9c-a509-3ece19348075" Dec 06 08:28:06 crc kubenswrapper[4763]: W1206 08:28:06.197221 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode18a4dfa_5953_422a_be11_7ae83ab5ec09.slice/crio-9c05995a80338067acff2ca9b4b27e27ac76571eada9bbd588c5b8841f2ca382 WatchSource:0}: Error finding container 9c05995a80338067acff2ca9b4b27e27ac76571eada9bbd588c5b8841f2ca382: Status 404 returned error can't find the container with id 9c05995a80338067acff2ca9b4b27e27ac76571eada9bbd588c5b8841f2ca382 Dec 06 08:28:06 crc kubenswrapper[4763]: I1206 08:28:06.646719 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 06 08:28:06 crc kubenswrapper[4763]: I1206 08:28:06.674791 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c768d6f65-9zlj6"] Dec 06 08:28:06 crc kubenswrapper[4763]: I1206 08:28:06.688949 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 06 08:28:06 crc kubenswrapper[4763]: I1206 08:28:06.853800 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" 
event={"ID":"e18a4dfa-5953-422a-be11-7ae83ab5ec09","Type":"ContainerStarted","Data":"9c05995a80338067acff2ca9b4b27e27ac76571eada9bbd588c5b8841f2ca382"} Dec 06 08:28:06 crc kubenswrapper[4763]: I1206 08:28:06.858225 4763 generic.go:334] "Generic (PLEG): container finished" podID="fcff0e31-9690-4020-ae0e-fed5a80ccd49" containerID="00f41ef4867c5c94184eb45441aa5a437b6e90a6d770e8941e0d43cdd1ceeb0a" exitCode=0 Dec 06 08:28:06 crc kubenswrapper[4763]: I1206 08:28:06.858314 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" event={"ID":"fcff0e31-9690-4020-ae0e-fed5a80ccd49","Type":"ContainerDied","Data":"00f41ef4867c5c94184eb45441aa5a437b6e90a6d770e8941e0d43cdd1ceeb0a"} Dec 06 08:28:06 crc kubenswrapper[4763]: I1206 08:28:06.859888 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1","Type":"ContainerStarted","Data":"d0a54450a926719f64fb7c39873fd7d1d5d02daece68914601458e9502e51127"} Dec 06 08:28:06 crc kubenswrapper[4763]: I1206 08:28:06.862395 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9","Type":"ContainerStarted","Data":"659aa450485048d1f0046170e3e8088d175d1ed6dd020f6b7783a3ca8491038e"} Dec 06 08:28:06 crc kubenswrapper[4763]: I1206 08:28:06.863979 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" event={"ID":"a1653ba8-d12f-4539-8542-a67380587248","Type":"ContainerStarted","Data":"cbef5a26385cde7212987aeef820b27e26aaac1c1c2c3eaf2f297eed729a1b96"} Dec 06 08:28:06 crc kubenswrapper[4763]: I1206 08:28:06.929910 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fc86f595f-rr6mx"] Dec 06 08:28:06 crc kubenswrapper[4763]: W1206 08:28:06.941537 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod293062e3_bba3_4bb9_a750_586c7285d5b1.slice/crio-b3fd8e2bc1b353a3ca2827ec6e53da58c546424c0c1f2230e2a1a6264c79e758 WatchSource:0}: Error finding container b3fd8e2bc1b353a3ca2827ec6e53da58c546424c0c1f2230e2a1a6264c79e758: Status 404 returned error can't find the container with id b3fd8e2bc1b353a3ca2827ec6e53da58c546424c0c1f2230e2a1a6264c79e758 Dec 06 08:28:06 crc kubenswrapper[4763]: I1206 08:28:06.946348 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 06 08:28:06 crc kubenswrapper[4763]: W1206 08:28:06.952135 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95ce87d2_e5c0_41f4_948a_e78e26077c91.slice/crio-1cd3949dc76d0f52c87272fc7b725ba28277a33dfbba08824cd1bed69d1d868b WatchSource:0}: Error finding container 1cd3949dc76d0f52c87272fc7b725ba28277a33dfbba08824cd1bed69d1d868b: Status 404 returned error can't find the container with id 1cd3949dc76d0f52c87272fc7b725ba28277a33dfbba08824cd1bed69d1d868b Dec 06 08:28:07 crc kubenswrapper[4763]: E1206 08:28:07.143681 4763 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 06 08:28:07 crc kubenswrapper[4763]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/fcff0e31-9690-4020-ae0e-fed5a80ccd49/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 06 08:28:07 crc kubenswrapper[4763]: > 
podSandboxID="09aca4f56a19f5565c32584e44ef6fa8daf2528223d11db68ed232f4286387b4" Dec 06 08:28:07 crc kubenswrapper[4763]: E1206 08:28:07.143837 4763 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 06 08:28:07 crc kubenswrapper[4763]: container &Container{Name:dnsmasq-dns,Image:38.102.83.156:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5c6h54h5b5h8fh59chb8h657h5c6hfbhfh4h68fh5f7h9fhc7h594h8hc7h5bfh56ch5fbh688h5bch699h5f6h55fh564h64h5dfh5dch586h75q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kmb2d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-fd5849d85-rzts8_openstack(fcff0e31-9690-4020-ae0e-fed5a80ccd49): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/fcff0e31-9690-4020-ae0e-fed5a80ccd49/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 06 08:28:07 crc kubenswrapper[4763]: > logger="UnhandledError" Dec 06 08:28:07 crc kubenswrapper[4763]: E1206 08:28:07.145934 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/fcff0e31-9690-4020-ae0e-fed5a80ccd49/volume-subpaths/dns-svc/dnsmasq-dns/1` to 
`etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" podUID="fcff0e31-9690-4020-ae0e-fed5a80ccd49" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.361379 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bbff789c7-257zl" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.362360 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.370129 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daff51c8-c909-418f-89c8-a3db427d82be-config\") pod \"daff51c8-c909-418f-89c8-a3db427d82be\" (UID: \"daff51c8-c909-418f-89c8-a3db427d82be\") " Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.370416 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8lxs\" (UniqueName: \"kubernetes.io/projected/daff51c8-c909-418f-89c8-a3db427d82be-kube-api-access-x8lxs\") pod \"daff51c8-c909-418f-89c8-a3db427d82be\" (UID: \"daff51c8-c909-418f-89c8-a3db427d82be\") " Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.371548 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daff51c8-c909-418f-89c8-a3db427d82be-config" (OuterVolumeSpecName: "config") pod "daff51c8-c909-418f-89c8-a3db427d82be" (UID: "daff51c8-c909-418f-89c8-a3db427d82be"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.383538 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daff51c8-c909-418f-89c8-a3db427d82be-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.394110 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daff51c8-c909-418f-89c8-a3db427d82be-kube-api-access-x8lxs" (OuterVolumeSpecName: "kube-api-access-x8lxs") pod "daff51c8-c909-418f-89c8-a3db427d82be" (UID: "daff51c8-c909-418f-89c8-a3db427d82be"). InnerVolumeSpecName "kube-api-access-x8lxs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.484592 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4cnw\" (UniqueName: \"kubernetes.io/projected/862af203-fa4c-4c9c-a509-3ece19348075-kube-api-access-b4cnw\") pod \"862af203-fa4c-4c9c-a509-3ece19348075\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.484678 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-dns-svc\") pod \"862af203-fa4c-4c9c-a509-3ece19348075\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.484696 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-config\") pod \"862af203-fa4c-4c9c-a509-3ece19348075\" (UID: \"862af203-fa4c-4c9c-a509-3ece19348075\") " Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.485311 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "862af203-fa4c-4c9c-a509-3ece19348075" (UID: "862af203-fa4c-4c9c-a509-3ece19348075"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.485720 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-config" (OuterVolumeSpecName: "config") pod "862af203-fa4c-4c9c-a509-3ece19348075" (UID: "862af203-fa4c-4c9c-a509-3ece19348075"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.485787 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.485799 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8lxs\" (UniqueName: \"kubernetes.io/projected/daff51c8-c909-418f-89c8-a3db427d82be-kube-api-access-x8lxs\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.496799 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/862af203-fa4c-4c9c-a509-3ece19348075-kube-api-access-b4cnw" (OuterVolumeSpecName: "kube-api-access-b4cnw") pod "862af203-fa4c-4c9c-a509-3ece19348075" (UID: "862af203-fa4c-4c9c-a509-3ece19348075"). InnerVolumeSpecName "kube-api-access-b4cnw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.508668 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.527932 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.538157 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mvnv6"] Dec 06 08:28:07 crc kubenswrapper[4763]: W1206 08:28:07.542482 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd80731d8_91ee_438d_a5c8_da9d36005e6f.slice/crio-de10312443d2f8f1ddc81a6fcf180d601768b87904925a1bfb812370569908d4 WatchSource:0}: Error finding container de10312443d2f8f1ddc81a6fcf180d601768b87904925a1bfb812370569908d4: Status 404 returned error can't find the container with id de10312443d2f8f1ddc81a6fcf180d601768b87904925a1bfb812370569908d4 Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.557166 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.565193 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.594468 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4cnw\" (UniqueName: \"kubernetes.io/projected/862af203-fa4c-4c9c-a509-3ece19348075-kube-api-access-b4cnw\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.594532 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/862af203-fa4c-4c9c-a509-3ece19348075-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.606485 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 06 08:28:07 crc kubenswrapper[4763]: W1206 08:28:07.613593 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod678168e1_cecc_486d_b2eb_366c90a302c5.slice/crio-8bc1c8cca8b82498edc389632d9978d92d5628e3fbb91dac57248503db7985a9 WatchSource:0}: Error finding container 8bc1c8cca8b82498edc389632d9978d92d5628e3fbb91dac57248503db7985a9: Status 404 returned error can't find the container with id 8bc1c8cca8b82498edc389632d9978d92d5628e3fbb91dac57248503db7985a9 Dec 06 08:28:07 crc kubenswrapper[4763]: W1206 08:28:07.619044 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00c834db_e265_44e2_9915_2be0931014a5.slice/crio-cc3c638bde25714d62d07ec12816fe2e743d58b812f2998b9af6dd030e872655 WatchSource:0}: Error finding container cc3c638bde25714d62d07ec12816fe2e743d58b812f2998b9af6dd030e872655: Status 404 returned error can't find the container with id cc3c638bde25714d62d07ec12816fe2e743d58b812f2998b9af6dd030e872655 Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.658636 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 06 08:28:07 crc kubenswrapper[4763]: W1206 08:28:07.670030 4763 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23914339_150e_409f_bd6d_7a1c91529a22.slice/crio-2b049c7c254156dc09cf1692850243206c4513965a2e5fb8e103702dd2e6ada4 WatchSource:0}: Error finding container 2b049c7c254156dc09cf1692850243206c4513965a2e5fb8e103702dd2e6ada4: Status 404 returned error can't find the container with id 2b049c7c254156dc09cf1692850243206c4513965a2e5fb8e103702dd2e6ada4 Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.877063 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"678168e1-cecc-486d-b2eb-366c90a302c5","Type":"ContainerStarted","Data":"8bc1c8cca8b82498edc389632d9978d92d5628e3fbb91dac57248503db7985a9"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.878258 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"95ce87d2-e5c0-41f4-948a-e78e26077c91","Type":"ContainerStarted","Data":"1cd3949dc76d0f52c87272fc7b725ba28277a33dfbba08824cd1bed69d1d868b"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.879395 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mvnv6" event={"ID":"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b","Type":"ContainerStarted","Data":"75c59d84ed9552a5ef2ad5a5d662c53a628ffee15e9dd6f0644683d383d4e6a5"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.880882 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d80731d8-91ee-438d-a5c8-da9d36005e6f","Type":"ContainerStarted","Data":"de10312443d2f8f1ddc81a6fcf180d601768b87904925a1bfb812370569908d4"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.895734 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" event={"ID":"862af203-fa4c-4c9c-a509-3ece19348075","Type":"ContainerDied","Data":"376f59049462a67d06f968aebee2f5c3b4dd7b0afd6877098e75f9981c1bc4c3"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.895779 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f78fc7c85-ggz7g" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.900793 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00c834db-e265-44e2-9915-2be0931014a5","Type":"ContainerStarted","Data":"cc3c638bde25714d62d07ec12816fe2e743d58b812f2998b9af6dd030e872655"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.906568 4763 generic.go:334] "Generic (PLEG): container finished" podID="a1653ba8-d12f-4539-8542-a67380587248" containerID="5180fc3a0813011c53e9c9f75d1b511b02413eeaee2194f042634d33606c796a" exitCode=0 Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.906633 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" event={"ID":"a1653ba8-d12f-4539-8542-a67380587248","Type":"ContainerDied","Data":"5180fc3a0813011c53e9c9f75d1b511b02413eeaee2194f042634d33606c796a"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.909302 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"23914339-150e-409f-bd6d-7a1c91529a22","Type":"ContainerStarted","Data":"2b049c7c254156dc09cf1692850243206c4513965a2e5fb8e103702dd2e6ada4"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.910676 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"b699a6d2-a0ce-4be7-9173-524d485cbd89","Type":"ContainerStarted","Data":"039a43a1156a9e7e0d8725720d1d72ac4bc162758a687f6bcf6498e7540c629d"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.914145 4763 generic.go:334] "Generic (PLEG): container finished" podID="293062e3-bba3-4bb9-a750-586c7285d5b1" containerID="c1852ac14b3439d451dbe63838c82dd9db7981907472d40943436e4be0f89712" exitCode=0 Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.914215 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" event={"ID":"293062e3-bba3-4bb9-a750-586c7285d5b1","Type":"ContainerDied","Data":"c1852ac14b3439d451dbe63838c82dd9db7981907472d40943436e4be0f89712"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.914238 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" event={"ID":"293062e3-bba3-4bb9-a750-586c7285d5b1","Type":"ContainerStarted","Data":"b3fd8e2bc1b353a3ca2827ec6e53da58c546424c0c1f2230e2a1a6264c79e758"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.915782 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4260e0fd-067b-4a47-8a55-0514868766aa","Type":"ContainerStarted","Data":"e5d5e3b80da4aa51a09e513217fee6737b645fb9e206087957625b97dc5f0e86"} Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.931865 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bbff789c7-257zl" Dec 06 08:28:07 crc kubenswrapper[4763]: I1206 08:28:07.932371 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bbff789c7-257zl" event={"ID":"daff51c8-c909-418f-89c8-a3db427d82be","Type":"ContainerDied","Data":"39a7fcd8d24890ebaa8f5e938bba0244241f8e5a24d3f854ba364328df86d5ad"} Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.098633 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f78fc7c85-ggz7g"] Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.102083 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f78fc7c85-ggz7g"] Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.132288 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bbff789c7-257zl"] Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.166083 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bbff789c7-257zl"] Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.230572 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-hqktq"] Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.300945 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.406449 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wqh4\" (UniqueName: \"kubernetes.io/projected/a1653ba8-d12f-4539-8542-a67380587248-kube-api-access-4wqh4\") pod \"a1653ba8-d12f-4539-8542-a67380587248\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.406602 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-dns-svc\") pod \"a1653ba8-d12f-4539-8542-a67380587248\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.406784 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-config\") pod \"a1653ba8-d12f-4539-8542-a67380587248\" (UID: \"a1653ba8-d12f-4539-8542-a67380587248\") " Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.425514 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1653ba8-d12f-4539-8542-a67380587248-kube-api-access-4wqh4" (OuterVolumeSpecName: "kube-api-access-4wqh4") pod "a1653ba8-d12f-4539-8542-a67380587248" (UID: "a1653ba8-d12f-4539-8542-a67380587248"). InnerVolumeSpecName "kube-api-access-4wqh4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.428785 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-config" (OuterVolumeSpecName: "config") pod "a1653ba8-d12f-4539-8542-a67380587248" (UID: "a1653ba8-d12f-4539-8542-a67380587248"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.429458 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a1653ba8-d12f-4539-8542-a67380587248" (UID: "a1653ba8-d12f-4539-8542-a67380587248"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.509192 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wqh4\" (UniqueName: \"kubernetes.io/projected/a1653ba8-d12f-4539-8542-a67380587248-kube-api-access-4wqh4\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.509278 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.509292 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1653ba8-d12f-4539-8542-a67380587248-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.945257 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.945984 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c768d6f65-9zlj6" event={"ID":"a1653ba8-d12f-4539-8542-a67380587248","Type":"ContainerDied","Data":"cbef5a26385cde7212987aeef820b27e26aaac1c1c2c3eaf2f297eed729a1b96"} Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.946265 4763 scope.go:117] "RemoveContainer" containerID="5180fc3a0813011c53e9c9f75d1b511b02413eeaee2194f042634d33606c796a" Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.952844 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" event={"ID":"293062e3-bba3-4bb9-a750-586c7285d5b1","Type":"ContainerStarted","Data":"1594cb17e4d0dcba8bc61d42079f694ba08d20fb3c8461e9d4dc67a4a2f7e5f3"} Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.953042 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.958987 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" event={"ID":"fcff0e31-9690-4020-ae0e-fed5a80ccd49","Type":"ContainerStarted","Data":"0e8d6e7f11b684f09dd4cf52a3273bcc25a69817ea3b9aa5ba47acd107e7d3dc"} Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.959219 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:28:08 crc kubenswrapper[4763]: I1206 08:28:08.961524 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hqktq" event={"ID":"3afc31ec-e08a-4564-afb9-dda5f891cb5c","Type":"ContainerStarted","Data":"5b3c12ac402b8b93dd67c70e3c63587c8fff6aede82a4fbd03b06117286cf622"} Dec 06 08:28:09 crc kubenswrapper[4763]: I1206 08:28:09.003495 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" podStartSLOduration=22.00347487 podStartE2EDuration="22.00347487s" podCreationTimestamp="2025-12-06 08:27:47 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:28:08.979718321 +0000 UTC m=+971.555423359" watchObservedRunningTime="2025-12-06 08:28:09.00347487 +0000 UTC m=+971.579179908" Dec 06 08:28:09 crc kubenswrapper[4763]: I1206 08:28:09.018794 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" podStartSLOduration=21.086462869 podStartE2EDuration="22.018771771s" podCreationTimestamp="2025-12-06 08:27:47 +0000 UTC" firstStartedPulling="2025-12-06 08:28:05.470087096 +0000 UTC m=+968.045792134" lastFinishedPulling="2025-12-06 08:28:06.402395998 +0000 UTC m=+968.978101036" observedRunningTime="2025-12-06 08:28:08.998318571 +0000 UTC m=+971.574023609" watchObservedRunningTime="2025-12-06 08:28:09.018771771 +0000 UTC m=+971.594476809" Dec 06 08:28:09 crc kubenswrapper[4763]: I1206 08:28:09.031101 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c768d6f65-9zlj6"] Dec 06 08:28:09 crc kubenswrapper[4763]: I1206 08:28:09.037077 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c768d6f65-9zlj6"] Dec 06 08:28:09 crc kubenswrapper[4763]: I1206 08:28:09.734936 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="862af203-fa4c-4c9c-a509-3ece19348075" path="/var/lib/kubelet/pods/862af203-fa4c-4c9c-a509-3ece19348075/volumes" Dec 06 08:28:09 crc kubenswrapper[4763]: I1206 08:28:09.735297 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1653ba8-d12f-4539-8542-a67380587248" path="/var/lib/kubelet/pods/a1653ba8-d12f-4539-8542-a67380587248/volumes" Dec 06 08:28:09 crc kubenswrapper[4763]: I1206 08:28:09.735784 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daff51c8-c909-418f-89c8-a3db427d82be" path="/var/lib/kubelet/pods/daff51c8-c909-418f-89c8-a3db427d82be/volumes" Dec 06 08:28:12 crc kubenswrapper[4763]: I1206 08:28:12.537104 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:28:12 crc kubenswrapper[4763]: I1206 08:28:12.538198 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:28:12 crc kubenswrapper[4763]: I1206 08:28:12.538328 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:28:12 crc kubenswrapper[4763]: I1206 08:28:12.539562 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7d94fae00dffdf507a33769e3c6f2fe9c1acc5dc734ee30a31695ce80e2528cc"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 08:28:12 crc kubenswrapper[4763]: I1206 08:28:12.539630 4763 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://7d94fae00dffdf507a33769e3c6f2fe9c1acc5dc734ee30a31695ce80e2528cc" gracePeriod=600 Dec 06 08:28:13 crc kubenswrapper[4763]: I1206 08:28:13.010006 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="7d94fae00dffdf507a33769e3c6f2fe9c1acc5dc734ee30a31695ce80e2528cc" exitCode=0 Dec 06 08:28:13 crc kubenswrapper[4763]: I1206 08:28:13.010411 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"7d94fae00dffdf507a33769e3c6f2fe9c1acc5dc734ee30a31695ce80e2528cc"} Dec 06 08:28:13 crc kubenswrapper[4763]: I1206 08:28:13.123177 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:28:13 crc kubenswrapper[4763]: I1206 08:28:13.179117 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fd5849d85-rzts8"] Dec 06 08:28:13 crc kubenswrapper[4763]: I1206 08:28:13.179323 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" podUID="fcff0e31-9690-4020-ae0e-fed5a80ccd49" containerName="dnsmasq-dns" containerID="cri-o://0e8d6e7f11b684f09dd4cf52a3273bcc25a69817ea3b9aa5ba47acd107e7d3dc" gracePeriod=10 Dec 06 08:28:13 crc kubenswrapper[4763]: I1206 08:28:13.184402 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:28:14 crc kubenswrapper[4763]: I1206 08:28:14.019283 4763 generic.go:334] "Generic (PLEG): container finished" podID="fcff0e31-9690-4020-ae0e-fed5a80ccd49" containerID="0e8d6e7f11b684f09dd4cf52a3273bcc25a69817ea3b9aa5ba47acd107e7d3dc" exitCode=0 Dec 06 08:28:14 crc kubenswrapper[4763]: I1206 08:28:14.019324 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" event={"ID":"fcff0e31-9690-4020-ae0e-fed5a80ccd49","Type":"ContainerDied","Data":"0e8d6e7f11b684f09dd4cf52a3273bcc25a69817ea3b9aa5ba47acd107e7d3dc"} Dec 06 08:28:15 crc kubenswrapper[4763]: I1206 08:28:15.033297 4763 scope.go:117] "RemoveContainer" containerID="1300a82f37d1d362b5ee04ae557ec46ab85297772068f4f4d8becb8428ab897a" Dec 06 08:28:16 crc kubenswrapper[4763]: I1206 08:28:16.519283 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:28:16 crc kubenswrapper[4763]: I1206 08:28:16.662955 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-config\") pod \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " Dec 06 08:28:16 crc kubenswrapper[4763]: I1206 08:28:16.663342 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-dns-svc\") pod \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " Dec 06 08:28:16 crc kubenswrapper[4763]: I1206 08:28:16.663398 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmb2d\" (UniqueName: \"kubernetes.io/projected/fcff0e31-9690-4020-ae0e-fed5a80ccd49-kube-api-access-kmb2d\") pod \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\" (UID: \"fcff0e31-9690-4020-ae0e-fed5a80ccd49\") " Dec 06 08:28:16 crc kubenswrapper[4763]: I1206 08:28:16.666680 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcff0e31-9690-4020-ae0e-fed5a80ccd49-kube-api-access-kmb2d" (OuterVolumeSpecName: "kube-api-access-kmb2d") pod "fcff0e31-9690-4020-ae0e-fed5a80ccd49" (UID: "fcff0e31-9690-4020-ae0e-fed5a80ccd49"). InnerVolumeSpecName "kube-api-access-kmb2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:16 crc kubenswrapper[4763]: I1206 08:28:16.703235 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fcff0e31-9690-4020-ae0e-fed5a80ccd49" (UID: "fcff0e31-9690-4020-ae0e-fed5a80ccd49"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:16 crc kubenswrapper[4763]: I1206 08:28:16.707982 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-config" (OuterVolumeSpecName: "config") pod "fcff0e31-9690-4020-ae0e-fed5a80ccd49" (UID: "fcff0e31-9690-4020-ae0e-fed5a80ccd49"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:16 crc kubenswrapper[4763]: I1206 08:28:16.766359 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:16 crc kubenswrapper[4763]: I1206 08:28:16.766431 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fcff0e31-9690-4020-ae0e-fed5a80ccd49-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:16 crc kubenswrapper[4763]: I1206 08:28:16.766930 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmb2d\" (UniqueName: \"kubernetes.io/projected/fcff0e31-9690-4020-ae0e-fed5a80ccd49-kube-api-access-kmb2d\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:17 crc kubenswrapper[4763]: I1206 08:28:17.063780 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"c874920460a590f92765b487dcad196b365be507c27bb07cd2fdb6e943ba11c4"} Dec 06 08:28:17 crc kubenswrapper[4763]: I1206 08:28:17.067949 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" event={"ID":"fcff0e31-9690-4020-ae0e-fed5a80ccd49","Type":"ContainerDied","Data":"09aca4f56a19f5565c32584e44ef6fa8daf2528223d11db68ed232f4286387b4"} Dec 06 08:28:17 crc kubenswrapper[4763]: I1206 08:28:17.068006 4763 scope.go:117] "RemoveContainer" containerID="0e8d6e7f11b684f09dd4cf52a3273bcc25a69817ea3b9aa5ba47acd107e7d3dc" Dec 06 08:28:17 crc kubenswrapper[4763]: I1206 08:28:17.068067 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fd5849d85-rzts8" Dec 06 08:28:17 crc kubenswrapper[4763]: I1206 08:28:17.129992 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fd5849d85-rzts8"] Dec 06 08:28:17 crc kubenswrapper[4763]: I1206 08:28:17.145223 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fd5849d85-rzts8"] Dec 06 08:28:17 crc kubenswrapper[4763]: I1206 08:28:17.526095 4763 scope.go:117] "RemoveContainer" containerID="00f41ef4867c5c94184eb45441aa5a437b6e90a6d770e8941e0d43cdd1ceeb0a" Dec 06 08:28:17 crc kubenswrapper[4763]: I1206 08:28:17.734761 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcff0e31-9690-4020-ae0e-fed5a80ccd49" path="/var/lib/kubelet/pods/fcff0e31-9690-4020-ae0e-fed5a80ccd49/volumes" Dec 06 08:28:18 crc kubenswrapper[4763]: I1206 08:28:18.076543 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1","Type":"ContainerStarted","Data":"b6ef96d4320e6b803fa48520515162cbf2a8ed5447e6efcd5c60e74fd1a5262c"} Dec 06 08:28:18 crc kubenswrapper[4763]: I1206 08:28:18.077760 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"b699a6d2-a0ce-4be7-9173-524d485cbd89","Type":"ContainerStarted","Data":"a78a594a3a17589aee605b1e5fecdd8c26041779669b8e7812c087834316d88e"} Dec 06 08:28:18 crc kubenswrapper[4763]: I1206 08:28:18.078105 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 06 08:28:18 crc kubenswrapper[4763]: I1206 08:28:18.117159 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=18.015500569 podStartE2EDuration="26.117141887s" podCreationTimestamp="2025-12-06 08:27:52 +0000 UTC" firstStartedPulling="2025-12-06 08:28:07.60658926 +0000 UTC m=+970.182294308" lastFinishedPulling="2025-12-06 08:28:15.708230578 +0000 UTC m=+978.283935626" observedRunningTime="2025-12-06 08:28:18.112260545 +0000 UTC m=+980.687965603" watchObservedRunningTime="2025-12-06 08:28:18.117141887 +0000 UTC m=+980.692846925" Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.090917 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hqktq" event={"ID":"3afc31ec-e08a-4564-afb9-dda5f891cb5c","Type":"ContainerStarted","Data":"ea2cc5d60af71ff0309c3df6b9325bcc183cf9d23410fc2ed15f6f1f80c9030d"} Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.092343 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"23914339-150e-409f-bd6d-7a1c91529a22","Type":"ContainerStarted","Data":"1a18f1622d60e754c78fb1092931452b000ff3ba19f8071fe6b4cab73e7e9660"} Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.094674 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4260e0fd-067b-4a47-8a55-0514868766aa","Type":"ContainerStarted","Data":"a933ad9151c407c00d004a6db317c197ecf74af4cfed750a526a0389a15fdb55"} Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.094753 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.096192 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mvnv6" 
event={"ID":"abd10dfb-5dd9-4271-94aa-60b8fed4ba2b","Type":"ContainerStarted","Data":"b38847855051aa9f6a79ef615543276e0ecf4b7ec22678cf60b710185c6a63f5"} Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.096333 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-mvnv6" Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.097830 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00c834db-e265-44e2-9915-2be0931014a5","Type":"ContainerStarted","Data":"b24dfa746f06309fc6539be78697f9987764a136fc75bfe5f3d1efca152ad22a"} Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.100332 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"678168e1-cecc-486d-b2eb-366c90a302c5","Type":"ContainerStarted","Data":"7e286b58931726fc22df8d2db292223e29dc311f51c1f1cdbd52e82af8ce517d"} Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.102291 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"95ce87d2-e5c0-41f4-948a-e78e26077c91","Type":"ContainerStarted","Data":"3b79be3dce95fdf97dc1243a8fffaa1bba79366e8c1a443fffc8ef3b3a25f3f0"} Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.104123 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"e18a4dfa-5953-422a-be11-7ae83ab5ec09","Type":"ContainerStarted","Data":"95bde55144df899e1de9b7726ea5fd2c9861a0b97343ca6da1379a05e03d206e"} Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.162427 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-mvnv6" podStartSLOduration=13.11655375 podStartE2EDuration="22.162409392s" podCreationTimestamp="2025-12-06 08:27:57 +0000 UTC" firstStartedPulling="2025-12-06 08:28:07.571085404 +0000 UTC m=+970.146790442" lastFinishedPulling="2025-12-06 08:28:16.616941056 +0000 UTC m=+979.192646084" observedRunningTime="2025-12-06 08:28:19.158331822 +0000 UTC m=+981.734036880" watchObservedRunningTime="2025-12-06 08:28:19.162409392 +0000 UTC m=+981.738114430" Dec 06 08:28:19 crc kubenswrapper[4763]: I1206 08:28:19.208526 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=14.479996059 podStartE2EDuration="25.208500297s" podCreationTimestamp="2025-12-06 08:27:54 +0000 UTC" firstStartedPulling="2025-12-06 08:28:07.542594316 +0000 UTC m=+970.118299354" lastFinishedPulling="2025-12-06 08:28:18.271098554 +0000 UTC m=+980.846803592" observedRunningTime="2025-12-06 08:28:19.201986601 +0000 UTC m=+981.777691649" watchObservedRunningTime="2025-12-06 08:28:19.208500297 +0000 UTC m=+981.784205335" Dec 06 08:28:20 crc kubenswrapper[4763]: I1206 08:28:20.114695 4763 generic.go:334] "Generic (PLEG): container finished" podID="3afc31ec-e08a-4564-afb9-dda5f891cb5c" containerID="ea2cc5d60af71ff0309c3df6b9325bcc183cf9d23410fc2ed15f6f1f80c9030d" exitCode=0 Dec 06 08:28:20 crc kubenswrapper[4763]: I1206 08:28:20.114829 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hqktq" event={"ID":"3afc31ec-e08a-4564-afb9-dda5f891cb5c","Type":"ContainerDied","Data":"ea2cc5d60af71ff0309c3df6b9325bcc183cf9d23410fc2ed15f6f1f80c9030d"} Dec 06 08:28:21 crc kubenswrapper[4763]: I1206 08:28:21.125849 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hqktq" 
event={"ID":"3afc31ec-e08a-4564-afb9-dda5f891cb5c","Type":"ContainerStarted","Data":"300c0e4182d8e5ffe2c79d924c82c3fc8828579613c34d42919d5750d3fefb65"} Dec 06 08:28:21 crc kubenswrapper[4763]: I1206 08:28:21.128335 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9","Type":"ContainerStarted","Data":"025330a3d2028eb40b7d062410c48bd3146ff2cd92275be82d0b7b00bf80c41c"} Dec 06 08:28:22 crc kubenswrapper[4763]: I1206 08:28:22.673867 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 06 08:28:23 crc kubenswrapper[4763]: I1206 08:28:23.147618 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d80731d8-91ee-438d-a5c8-da9d36005e6f","Type":"ContainerStarted","Data":"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec"} Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.086719 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.272933 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c69c9c9cf-rzlxr"] Dec 06 08:28:25 crc kubenswrapper[4763]: E1206 08:28:25.273367 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1653ba8-d12f-4539-8542-a67380587248" containerName="init" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.273388 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1653ba8-d12f-4539-8542-a67380587248" containerName="init" Dec 06 08:28:25 crc kubenswrapper[4763]: E1206 08:28:25.273401 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcff0e31-9690-4020-ae0e-fed5a80ccd49" containerName="dnsmasq-dns" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.273411 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcff0e31-9690-4020-ae0e-fed5a80ccd49" containerName="dnsmasq-dns" Dec 06 08:28:25 crc kubenswrapper[4763]: E1206 08:28:25.273426 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcff0e31-9690-4020-ae0e-fed5a80ccd49" containerName="init" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.273434 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcff0e31-9690-4020-ae0e-fed5a80ccd49" containerName="init" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.273645 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1653ba8-d12f-4539-8542-a67380587248" containerName="init" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.273678 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcff0e31-9690-4020-ae0e-fed5a80ccd49" containerName="dnsmasq-dns" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.274719 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.288268 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c69c9c9cf-rzlxr"] Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.352500 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-config\") pod \"dnsmasq-dns-6c69c9c9cf-rzlxr\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.352571 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-dns-svc\") pod \"dnsmasq-dns-6c69c9c9cf-rzlxr\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.352608 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2wnp\" (UniqueName: \"kubernetes.io/projected/664db788-e1cc-494f-a331-ebc3e7fa7bc8-kube-api-access-s2wnp\") pod \"dnsmasq-dns-6c69c9c9cf-rzlxr\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.455953 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-config\") pod \"dnsmasq-dns-6c69c9c9cf-rzlxr\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.456026 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-dns-svc\") pod \"dnsmasq-dns-6c69c9c9cf-rzlxr\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.456065 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2wnp\" (UniqueName: \"kubernetes.io/projected/664db788-e1cc-494f-a331-ebc3e7fa7bc8-kube-api-access-s2wnp\") pod \"dnsmasq-dns-6c69c9c9cf-rzlxr\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.457428 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-config\") pod \"dnsmasq-dns-6c69c9c9cf-rzlxr\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.458034 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-dns-svc\") pod \"dnsmasq-dns-6c69c9c9cf-rzlxr\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.483705 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2wnp\" (UniqueName: 
\"kubernetes.io/projected/664db788-e1cc-494f-a331-ebc3e7fa7bc8-kube-api-access-s2wnp\") pod \"dnsmasq-dns-6c69c9c9cf-rzlxr\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:25 crc kubenswrapper[4763]: I1206 08:28:25.629954 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.391274 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.398972 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.405001 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.405409 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.405462 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-x8lbv" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.405584 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.424007 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.477354 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzzd5\" (UniqueName: \"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-kube-api-access-xzzd5\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.477430 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/df22632a-c5cb-4636-abfe-48f60e1df901-cache\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.477481 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/df22632a-c5cb-4636-abfe-48f60e1df901-lock\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.477525 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.477559 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.578703 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" 
(UniqueName: \"kubernetes.io/empty-dir/df22632a-c5cb-4636-abfe-48f60e1df901-lock\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.578781 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.578815 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.578834 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzzd5\" (UniqueName: \"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-kube-api-access-xzzd5\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.578881 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/df22632a-c5cb-4636-abfe-48f60e1df901-cache\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: E1206 08:28:26.579191 4763 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 06 08:28:26 crc kubenswrapper[4763]: E1206 08:28:26.579239 4763 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 06 08:28:26 crc kubenswrapper[4763]: E1206 08:28:26.579351 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift podName:df22632a-c5cb-4636-abfe-48f60e1df901 nodeName:}" failed. No retries permitted until 2025-12-06 08:28:27.079293672 +0000 UTC m=+989.654998710 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift") pod "swift-storage-0" (UID: "df22632a-c5cb-4636-abfe-48f60e1df901") : configmap "swift-ring-files" not found Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.579515 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/df22632a-c5cb-4636-abfe-48f60e1df901-cache\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.579516 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/df22632a-c5cb-4636-abfe-48f60e1df901-lock\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.581131 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.608365 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.619716 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzzd5\" (UniqueName: \"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-kube-api-access-xzzd5\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.951738 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-crchz"] Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.953652 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.963117 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.963132 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.973624 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 06 08:28:26 crc kubenswrapper[4763]: I1206 08:28:26.981738 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-crchz"] Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.000983 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-combined-ca-bundle\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.001037 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-swiftconf\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.001095 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-dispersionconf\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.001131 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-ring-data-devices\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.001173 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e74906c2-6446-4cb0-a428-61609a969406-etc-swift\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.001204 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxj6s\" (UniqueName: \"kubernetes.io/projected/e74906c2-6446-4cb0-a428-61609a969406-kube-api-access-nxj6s\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.001289 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-scripts\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 
08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.102849 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-combined-ca-bundle\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.102933 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-swiftconf\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.102969 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-dispersionconf\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.103007 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-ring-data-devices\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.103042 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e74906c2-6446-4cb0-a428-61609a969406-etc-swift\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.103082 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxj6s\" (UniqueName: \"kubernetes.io/projected/e74906c2-6446-4cb0-a428-61609a969406-kube-api-access-nxj6s\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.103156 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.103178 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-scripts\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: E1206 08:28:27.103805 4763 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 06 08:28:27 crc kubenswrapper[4763]: E1206 08:28:27.103891 4763 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 06 08:28:27 crc kubenswrapper[4763]: E1206 08:28:27.104049 4763 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift podName:df22632a-c5cb-4636-abfe-48f60e1df901 nodeName:}" failed. No retries permitted until 2025-12-06 08:28:28.104026052 +0000 UTC m=+990.679731090 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift") pod "swift-storage-0" (UID: "df22632a-c5cb-4636-abfe-48f60e1df901") : configmap "swift-ring-files" not found Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.105697 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-ring-data-devices\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.106376 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-scripts\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.106609 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e74906c2-6446-4cb0-a428-61609a969406-etc-swift\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.108726 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-swiftconf\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.110015 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-dispersionconf\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.120403 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-combined-ca-bundle\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.132643 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxj6s\" (UniqueName: \"kubernetes.io/projected/e74906c2-6446-4cb0-a428-61609a969406-kube-api-access-nxj6s\") pod \"swift-ring-rebalance-crchz\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:27 crc kubenswrapper[4763]: I1206 08:28:27.307412 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:28 crc kubenswrapper[4763]: I1206 08:28:28.121410 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:28 crc kubenswrapper[4763]: E1206 08:28:28.121621 4763 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 06 08:28:28 crc kubenswrapper[4763]: E1206 08:28:28.121663 4763 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 06 08:28:28 crc kubenswrapper[4763]: E1206 08:28:28.121724 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift podName:df22632a-c5cb-4636-abfe-48f60e1df901 nodeName:}" failed. No retries permitted until 2025-12-06 08:28:30.121703703 +0000 UTC m=+992.697408741 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift") pod "swift-storage-0" (UID: "df22632a-c5cb-4636-abfe-48f60e1df901") : configmap "swift-ring-files" not found Dec 06 08:28:28 crc kubenswrapper[4763]: I1206 08:28:28.910410 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-crchz"] Dec 06 08:28:28 crc kubenswrapper[4763]: W1206 08:28:28.923526 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode74906c2_6446_4cb0_a428_61609a969406.slice/crio-33c2af4fcbed0a6061a8554a35e9873c32bf9b1ee881ed1f2f55a994b09a31d0 WatchSource:0}: Error finding container 33c2af4fcbed0a6061a8554a35e9873c32bf9b1ee881ed1f2f55a994b09a31d0: Status 404 returned error can't find the container with id 33c2af4fcbed0a6061a8554a35e9873c32bf9b1ee881ed1f2f55a994b09a31d0 Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.046931 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c69c9c9cf-rzlxr"] Dec 06 08:28:29 crc kubenswrapper[4763]: W1206 08:28:29.052617 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod664db788_e1cc_494f_a331_ebc3e7fa7bc8.slice/crio-b5ae889c54d9592113f50054db841a212c13723892e4776c1e844ebefc0a07e4 WatchSource:0}: Error finding container b5ae889c54d9592113f50054db841a212c13723892e4776c1e844ebefc0a07e4: Status 404 returned error can't find the container with id b5ae889c54d9592113f50054db841a212c13723892e4776c1e844ebefc0a07e4 Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.238551 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"23914339-150e-409f-bd6d-7a1c91529a22","Type":"ContainerStarted","Data":"a57ae1bf6d9b9ce02b7260e4e08048ca042fdc64aa424363ea5a135182c283de"} Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.241803 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"678168e1-cecc-486d-b2eb-366c90a302c5","Type":"ContainerStarted","Data":"8da5ac9b6ef054c835e1e4b501a5638a512103f98c05c41d2bc674cc49cad9e9"} Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.244047 4763 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" event={"ID":"664db788-e1cc-494f-a331-ebc3e7fa7bc8","Type":"ContainerStarted","Data":"b5ae889c54d9592113f50054db841a212c13723892e4776c1e844ebefc0a07e4"} Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.246500 4763 generic.go:334] "Generic (PLEG): container finished" podID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerID="344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec" exitCode=0 Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.246550 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d80731d8-91ee-438d-a5c8-da9d36005e6f","Type":"ContainerDied","Data":"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec"} Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.248642 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-crchz" event={"ID":"e74906c2-6446-4cb0-a428-61609a969406","Type":"ContainerStarted","Data":"33c2af4fcbed0a6061a8554a35e9873c32bf9b1ee881ed1f2f55a994b09a31d0"} Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.252701 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hqktq" event={"ID":"3afc31ec-e08a-4564-afb9-dda5f891cb5c","Type":"ContainerStarted","Data":"db86e9461e73a2bd4a622a6e092c587eba235fa0436c3f65875e0972fe5c941f"} Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.252969 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.253014 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.290917 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=8.477111262 podStartE2EDuration="29.290886795s" podCreationTimestamp="2025-12-06 08:28:00 +0000 UTC" firstStartedPulling="2025-12-06 08:28:07.673945393 +0000 UTC m=+970.249650431" lastFinishedPulling="2025-12-06 08:28:28.487720926 +0000 UTC m=+991.063425964" observedRunningTime="2025-12-06 08:28:29.265198552 +0000 UTC m=+991.840903590" watchObservedRunningTime="2025-12-06 08:28:29.290886795 +0000 UTC m=+991.866591833" Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.295615 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-hqktq" podStartSLOduration=24.069799093 podStartE2EDuration="32.295605392s" podCreationTimestamp="2025-12-06 08:27:57 +0000 UTC" firstStartedPulling="2025-12-06 08:28:08.212393871 +0000 UTC m=+970.788098909" lastFinishedPulling="2025-12-06 08:28:16.43820017 +0000 UTC m=+979.013905208" observedRunningTime="2025-12-06 08:28:29.287381431 +0000 UTC m=+991.863086469" watchObservedRunningTime="2025-12-06 08:28:29.295605392 +0000 UTC m=+991.871310430" Dec 06 08:28:29 crc kubenswrapper[4763]: I1206 08:28:29.355188 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=11.561876875 podStartE2EDuration="32.355170341s" podCreationTimestamp="2025-12-06 08:27:57 +0000 UTC" firstStartedPulling="2025-12-06 08:28:07.616945238 +0000 UTC m=+970.192650276" lastFinishedPulling="2025-12-06 08:28:28.410238704 +0000 UTC m=+990.985943742" observedRunningTime="2025-12-06 08:28:29.351618385 +0000 UTC m=+991.927323423" watchObservedRunningTime="2025-12-06 
08:28:29.355170341 +0000 UTC m=+991.930875379" Dec 06 08:28:30 crc kubenswrapper[4763]: I1206 08:28:30.162280 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:30 crc kubenswrapper[4763]: E1206 08:28:30.162857 4763 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 06 08:28:30 crc kubenswrapper[4763]: E1206 08:28:30.162872 4763 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 06 08:28:30 crc kubenswrapper[4763]: E1206 08:28:30.162968 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift podName:df22632a-c5cb-4636-abfe-48f60e1df901 nodeName:}" failed. No retries permitted until 2025-12-06 08:28:34.162955214 +0000 UTC m=+996.738660252 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift") pod "swift-storage-0" (UID: "df22632a-c5cb-4636-abfe-48f60e1df901") : configmap "swift-ring-files" not found Dec 06 08:28:30 crc kubenswrapper[4763]: I1206 08:28:30.262264 4763 generic.go:334] "Generic (PLEG): container finished" podID="de1b0280-c39f-4e3d-98b9-cdbb0085e6e1" containerID="b6ef96d4320e6b803fa48520515162cbf2a8ed5447e6efcd5c60e74fd1a5262c" exitCode=0 Dec 06 08:28:30 crc kubenswrapper[4763]: I1206 08:28:30.262325 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1","Type":"ContainerDied","Data":"b6ef96d4320e6b803fa48520515162cbf2a8ed5447e6efcd5c60e74fd1a5262c"} Dec 06 08:28:30 crc kubenswrapper[4763]: I1206 08:28:30.265406 4763 generic.go:334] "Generic (PLEG): container finished" podID="664db788-e1cc-494f-a331-ebc3e7fa7bc8" containerID="8fceb5dee96f9acd0be7de5fe00e3439e42f671032ccfcac2b49a6b2cb5d6e5e" exitCode=0 Dec 06 08:28:30 crc kubenswrapper[4763]: I1206 08:28:30.265460 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" event={"ID":"664db788-e1cc-494f-a331-ebc3e7fa7bc8","Type":"ContainerDied","Data":"8fceb5dee96f9acd0be7de5fe00e3439e42f671032ccfcac2b49a6b2cb5d6e5e"} Dec 06 08:28:31 crc kubenswrapper[4763]: I1206 08:28:31.277003 4763 generic.go:334] "Generic (PLEG): container finished" podID="00c834db-e265-44e2-9915-2be0931014a5" containerID="b24dfa746f06309fc6539be78697f9987764a136fc75bfe5f3d1efca152ad22a" exitCode=0 Dec 06 08:28:31 crc kubenswrapper[4763]: I1206 08:28:31.277045 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00c834db-e265-44e2-9915-2be0931014a5","Type":"ContainerDied","Data":"b24dfa746f06309fc6539be78697f9987764a136fc75bfe5f3d1efca152ad22a"} Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.042177 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.088514 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.162124 4763 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.162165 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.224526 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.292084 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-crchz" event={"ID":"e74906c2-6446-4cb0-a428-61609a969406","Type":"ContainerStarted","Data":"2c3b1f9f0a95e111462f2c1fd2c1a26ea4cb7cdf38cd9a08c576b1998d8e176e"} Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.308195 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" event={"ID":"664db788-e1cc-494f-a331-ebc3e7fa7bc8","Type":"ContainerStarted","Data":"b8313d1fe233c398d086d63dc7a108e4d203705345bda81a12eb63b48fc1e61c"} Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.314786 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00c834db-e265-44e2-9915-2be0931014a5","Type":"ContainerStarted","Data":"9de126d280ea17b1e0128e24abbe502aed90280839d11b014912e1fc6ae0b181"} Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.317023 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"de1b0280-c39f-4e3d-98b9-cdbb0085e6e1","Type":"ContainerStarted","Data":"350f52d533dc239f66a1adee4156bb87b695d72c8e98c3631f1ce5bda60556d6"} Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.317376 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.345196 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-crchz" podStartSLOduration=3.664419552 podStartE2EDuration="6.345177081s" podCreationTimestamp="2025-12-06 08:28:26 +0000 UTC" firstStartedPulling="2025-12-06 08:28:28.927837032 +0000 UTC m=+991.503542060" lastFinishedPulling="2025-12-06 08:28:31.608594541 +0000 UTC m=+994.184299589" observedRunningTime="2025-12-06 08:28:32.322263933 +0000 UTC m=+994.897968961" watchObservedRunningTime="2025-12-06 08:28:32.345177081 +0000 UTC m=+994.920882119" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.369799 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=34.11569971 podStartE2EDuration="43.369780686s" podCreationTimestamp="2025-12-06 08:27:49 +0000 UTC" firstStartedPulling="2025-12-06 08:28:06.696064925 +0000 UTC m=+969.271769953" lastFinishedPulling="2025-12-06 08:28:15.950145881 +0000 UTC m=+978.525850929" observedRunningTime="2025-12-06 08:28:32.341820121 +0000 UTC m=+994.917525179" watchObservedRunningTime="2025-12-06 08:28:32.369780686 +0000 UTC m=+994.945485724" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.371340 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.373604 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=32.10362512 podStartE2EDuration="41.373594929s" podCreationTimestamp="2025-12-06 08:27:51 +0000 UTC" firstStartedPulling="2025-12-06 
08:28:07.629089356 +0000 UTC m=+970.204794394" lastFinishedPulling="2025-12-06 08:28:16.899059155 +0000 UTC m=+979.474764203" observedRunningTime="2025-12-06 08:28:32.366473037 +0000 UTC m=+994.942178075" watchObservedRunningTime="2025-12-06 08:28:32.373594929 +0000 UTC m=+994.949299967" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.386545 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.396459 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" podStartSLOduration=7.396437196 podStartE2EDuration="7.396437196s" podCreationTimestamp="2025-12-06 08:28:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:28:32.384777221 +0000 UTC m=+994.960482259" watchObservedRunningTime="2025-12-06 08:28:32.396437196 +0000 UTC m=+994.972142234" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.486966 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.487141 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.570519 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c69c9c9cf-rzlxr"] Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.584874 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-556985dd7-d5qgk"] Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.586139 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.590753 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.599081 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-zwkh6"] Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.600447 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.604916 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.609053 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-556985dd7-d5qgk"] Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.644967 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-zwkh6"] Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.716877 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-config\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.716940 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z9s9\" (UniqueName: \"kubernetes.io/projected/d935db21-d7d0-4f7a-8d65-121e5263c242-kube-api-access-4z9s9\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.716982 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-ovsdbserver-nb\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.717001 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d935db21-d7d0-4f7a-8d65-121e5263c242-ovs-rundir\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.717027 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/d935db21-d7d0-4f7a-8d65-121e5263c242-ovn-rundir\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.717084 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d935db21-d7d0-4f7a-8d65-121e5263c242-config\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.717102 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lhck\" (UniqueName: \"kubernetes.io/projected/59b24f43-6cea-44dd-90fa-fedf565f658c-kube-api-access-6lhck\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.717131 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-dns-svc\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.717151 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d935db21-d7d0-4f7a-8d65-121e5263c242-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.717165 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d935db21-d7d0-4f7a-8d65-121e5263c242-combined-ca-bundle\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.779184 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-556985dd7-d5qgk"] Dec 06 08:28:32 crc kubenswrapper[4763]: E1206 08:28:32.779912 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-6lhck ovsdbserver-nb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-556985dd7-d5qgk" podUID="59b24f43-6cea-44dd-90fa-fedf565f658c" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.820661 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.821505 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d935db21-d7d0-4f7a-8d65-121e5263c242-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.821555 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d935db21-d7d0-4f7a-8d65-121e5263c242-combined-ca-bundle\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.821667 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-config\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.821719 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z9s9\" (UniqueName: \"kubernetes.io/projected/d935db21-d7d0-4f7a-8d65-121e5263c242-kube-api-access-4z9s9\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.821763 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-ovsdbserver-nb\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.821789 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d935db21-d7d0-4f7a-8d65-121e5263c242-ovs-rundir\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.821832 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/d935db21-d7d0-4f7a-8d65-121e5263c242-ovn-rundir\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.822008 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d935db21-d7d0-4f7a-8d65-121e5263c242-config\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.822040 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lhck\" (UniqueName: \"kubernetes.io/projected/59b24f43-6cea-44dd-90fa-fedf565f658c-kube-api-access-6lhck\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.822082 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-dns-svc\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.822411 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.823142 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-dns-svc\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.827547 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.827810 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.828002 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.828541 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-ovsdbserver-nb\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.828721 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d935db21-d7d0-4f7a-8d65-121e5263c242-config\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.829073 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/d935db21-d7d0-4f7a-8d65-121e5263c242-ovn-rundir\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.829322 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-fhm8w" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.829324 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b64889c67-6xmsx"] Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.829833 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d935db21-d7d0-4f7a-8d65-121e5263c242-ovs-rundir\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.832484 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.832937 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d935db21-d7d0-4f7a-8d65-121e5263c242-combined-ca-bundle\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.836954 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-config\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.837608 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.839503 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d935db21-d7d0-4f7a-8d65-121e5263c242-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.839680 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.848866 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b64889c67-6xmsx"] Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.867332 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z9s9\" (UniqueName: \"kubernetes.io/projected/d935db21-d7d0-4f7a-8d65-121e5263c242-kube-api-access-4z9s9\") pod \"ovn-controller-metrics-zwkh6\" (UID: \"d935db21-d7d0-4f7a-8d65-121e5263c242\") " pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.876728 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lhck\" (UniqueName: \"kubernetes.io/projected/59b24f43-6cea-44dd-90fa-fedf565f658c-kube-api-access-6lhck\") pod \"dnsmasq-dns-556985dd7-d5qgk\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.923738 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.923851 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc7fn\" (UniqueName: \"kubernetes.io/projected/ecf53c07-1690-45db-91fe-1afe6fb21de6-kube-api-access-lc7fn\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.923995 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-dns-svc\") pod 
\"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.924028 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-scripts\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.924051 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.924075 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-nb\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.924112 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-config\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.924156 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-config\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.924184 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.924210 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.924258 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-sb\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.924291 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfsgz\" (UniqueName: \"kubernetes.io/projected/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-kube-api-access-cfsgz\") pod \"ovn-northd-0\" (UID: 
\"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:32 crc kubenswrapper[4763]: I1206 08:28:32.940625 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-zwkh6" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025638 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc7fn\" (UniqueName: \"kubernetes.io/projected/ecf53c07-1690-45db-91fe-1afe6fb21de6-kube-api-access-lc7fn\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025688 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-dns-svc\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025707 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-scripts\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025725 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025743 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-nb\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025775 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-config\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025806 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-config\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025833 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025853 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " 
pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025893 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-sb\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025929 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfsgz\" (UniqueName: \"kubernetes.io/projected/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-kube-api-access-cfsgz\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.025962 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.026498 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.026718 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-scripts\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.028305 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-dns-svc\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.028564 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-config\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.029726 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-sb\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.034532 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-config\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.035002 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: 
\"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.041222 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-nb\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.045043 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.046845 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc7fn\" (UniqueName: \"kubernetes.io/projected/ecf53c07-1690-45db-91fe-1afe6fb21de6-kube-api-access-lc7fn\") pod \"dnsmasq-dns-7b64889c67-6xmsx\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.053582 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfsgz\" (UniqueName: \"kubernetes.io/projected/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-kube-api-access-cfsgz\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.055763 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e80cac8a-fa8e-4e8d-bc78-d3962d6921dc-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc\") " pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.070695 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.101824 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.336055 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.336696 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.350143 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.448266 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-dns-svc\") pod \"59b24f43-6cea-44dd-90fa-fedf565f658c\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.448334 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lhck\" (UniqueName: \"kubernetes.io/projected/59b24f43-6cea-44dd-90fa-fedf565f658c-kube-api-access-6lhck\") pod \"59b24f43-6cea-44dd-90fa-fedf565f658c\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.448402 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-ovsdbserver-nb\") pod \"59b24f43-6cea-44dd-90fa-fedf565f658c\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.448444 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-config\") pod \"59b24f43-6cea-44dd-90fa-fedf565f658c\" (UID: \"59b24f43-6cea-44dd-90fa-fedf565f658c\") " Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.449137 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "59b24f43-6cea-44dd-90fa-fedf565f658c" (UID: "59b24f43-6cea-44dd-90fa-fedf565f658c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.449395 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.449474 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-config" (OuterVolumeSpecName: "config") pod "59b24f43-6cea-44dd-90fa-fedf565f658c" (UID: "59b24f43-6cea-44dd-90fa-fedf565f658c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.451063 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "59b24f43-6cea-44dd-90fa-fedf565f658c" (UID: "59b24f43-6cea-44dd-90fa-fedf565f658c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.452048 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59b24f43-6cea-44dd-90fa-fedf565f658c-kube-api-access-6lhck" (OuterVolumeSpecName: "kube-api-access-6lhck") pod "59b24f43-6cea-44dd-90fa-fedf565f658c" (UID: "59b24f43-6cea-44dd-90fa-fedf565f658c"). InnerVolumeSpecName "kube-api-access-6lhck". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.551433 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.551733 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lhck\" (UniqueName: \"kubernetes.io/projected/59b24f43-6cea-44dd-90fa-fedf565f658c-kube-api-access-6lhck\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.551746 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59b24f43-6cea-44dd-90fa-fedf565f658c-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.636559 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-zwkh6"] Dec 06 08:28:33 crc kubenswrapper[4763]: W1206 08:28:33.656800 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd935db21_d7d0_4f7a_8d65_121e5263c242.slice/crio-f623f4158d1fe902fa79c487b1eae50a32e98e628ef48400a032429e903a6b4f WatchSource:0}: Error finding container f623f4158d1fe902fa79c487b1eae50a32e98e628ef48400a032429e903a6b4f: Status 404 returned error can't find the container with id f623f4158d1fe902fa79c487b1eae50a32e98e628ef48400a032429e903a6b4f Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.703636 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 06 08:28:33 crc kubenswrapper[4763]: W1206 08:28:33.705310 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode80cac8a_fa8e_4e8d_bc78_d3962d6921dc.slice/crio-def6391ea4f31e9d7811c85dc78062ca522126c8ab5abd73649d9d621061f0cc WatchSource:0}: Error finding container def6391ea4f31e9d7811c85dc78062ca522126c8ab5abd73649d9d621061f0cc: Status 404 returned error can't find the container with id def6391ea4f31e9d7811c85dc78062ca522126c8ab5abd73649d9d621061f0cc Dec 06 08:28:33 crc kubenswrapper[4763]: I1206 08:28:33.796651 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b64889c67-6xmsx"] Dec 06 08:28:33 crc kubenswrapper[4763]: W1206 08:28:33.797508 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podecf53c07_1690_45db_91fe_1afe6fb21de6.slice/crio-9eb592ddb2ffa1430ae443103637c786c1161933a954383172b5607e5aee298b WatchSource:0}: Error finding container 9eb592ddb2ffa1430ae443103637c786c1161933a954383172b5607e5aee298b: Status 404 returned error can't find the container with id 9eb592ddb2ffa1430ae443103637c786c1161933a954383172b5607e5aee298b Dec 06 08:28:34 crc kubenswrapper[4763]: I1206 08:28:34.163601 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:34 crc kubenswrapper[4763]: E1206 08:28:34.163822 4763 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 06 08:28:34 crc kubenswrapper[4763]: E1206 08:28:34.163852 4763 
projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 06 08:28:34 crc kubenswrapper[4763]: E1206 08:28:34.163924 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift podName:df22632a-c5cb-4636-abfe-48f60e1df901 nodeName:}" failed. No retries permitted until 2025-12-06 08:28:42.163886423 +0000 UTC m=+1004.739591461 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift") pod "swift-storage-0" (UID: "df22632a-c5cb-4636-abfe-48f60e1df901") : configmap "swift-ring-files" not found Dec 06 08:28:34 crc kubenswrapper[4763]: I1206 08:28:34.344249 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc","Type":"ContainerStarted","Data":"def6391ea4f31e9d7811c85dc78062ca522126c8ab5abd73649d9d621061f0cc"} Dec 06 08:28:34 crc kubenswrapper[4763]: I1206 08:28:34.346267 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" event={"ID":"ecf53c07-1690-45db-91fe-1afe6fb21de6","Type":"ContainerStarted","Data":"9eb592ddb2ffa1430ae443103637c786c1161933a954383172b5607e5aee298b"} Dec 06 08:28:34 crc kubenswrapper[4763]: I1206 08:28:34.347567 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-zwkh6" event={"ID":"d935db21-d7d0-4f7a-8d65-121e5263c242","Type":"ContainerStarted","Data":"f623f4158d1fe902fa79c487b1eae50a32e98e628ef48400a032429e903a6b4f"} Dec 06 08:28:34 crc kubenswrapper[4763]: I1206 08:28:34.347979 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" podUID="664db788-e1cc-494f-a331-ebc3e7fa7bc8" containerName="dnsmasq-dns" containerID="cri-o://b8313d1fe233c398d086d63dc7a108e4d203705345bda81a12eb63b48fc1e61c" gracePeriod=10 Dec 06 08:28:34 crc kubenswrapper[4763]: I1206 08:28:34.348516 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-556985dd7-d5qgk" Dec 06 08:28:34 crc kubenswrapper[4763]: I1206 08:28:34.394087 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-556985dd7-d5qgk"] Dec 06 08:28:34 crc kubenswrapper[4763]: I1206 08:28:34.401988 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-556985dd7-d5qgk"] Dec 06 08:28:35 crc kubenswrapper[4763]: I1206 08:28:35.360112 4763 generic.go:334] "Generic (PLEG): container finished" podID="ecf53c07-1690-45db-91fe-1afe6fb21de6" containerID="b9222022e597a8416cc532aa4e0d9e0c1453dd2985212f42969e1267ae4dbcd8" exitCode=0 Dec 06 08:28:35 crc kubenswrapper[4763]: I1206 08:28:35.360594 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" event={"ID":"ecf53c07-1690-45db-91fe-1afe6fb21de6","Type":"ContainerDied","Data":"b9222022e597a8416cc532aa4e0d9e0c1453dd2985212f42969e1267ae4dbcd8"} Dec 06 08:28:35 crc kubenswrapper[4763]: I1206 08:28:35.367864 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-zwkh6" event={"ID":"d935db21-d7d0-4f7a-8d65-121e5263c242","Type":"ContainerStarted","Data":"e53e17606121beeb46c743438b1df224176e173d33a97afbd5b8625dbb64c901"} Dec 06 08:28:35 crc kubenswrapper[4763]: I1206 08:28:35.371254 4763 generic.go:334] "Generic (PLEG): container finished" podID="664db788-e1cc-494f-a331-ebc3e7fa7bc8" containerID="b8313d1fe233c398d086d63dc7a108e4d203705345bda81a12eb63b48fc1e61c" exitCode=0 Dec 06 08:28:35 crc kubenswrapper[4763]: I1206 08:28:35.371289 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" event={"ID":"664db788-e1cc-494f-a331-ebc3e7fa7bc8","Type":"ContainerDied","Data":"b8313d1fe233c398d086d63dc7a108e4d203705345bda81a12eb63b48fc1e61c"} Dec 06 08:28:35 crc kubenswrapper[4763]: I1206 08:28:35.407105 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-zwkh6" podStartSLOduration=3.4070799640000002 podStartE2EDuration="3.407079964s" podCreationTimestamp="2025-12-06 08:28:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:28:35.395741548 +0000 UTC m=+997.971446596" watchObservedRunningTime="2025-12-06 08:28:35.407079964 +0000 UTC m=+997.982785002" Dec 06 08:28:35 crc kubenswrapper[4763]: I1206 08:28:35.754853 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59b24f43-6cea-44dd-90fa-fedf565f658c" path="/var/lib/kubelet/pods/59b24f43-6cea-44dd-90fa-fedf565f658c/volumes" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.160677 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.237832 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-dns-svc\") pod \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.238120 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-config\") pod \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.238245 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2wnp\" (UniqueName: \"kubernetes.io/projected/664db788-e1cc-494f-a331-ebc3e7fa7bc8-kube-api-access-s2wnp\") pod \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\" (UID: \"664db788-e1cc-494f-a331-ebc3e7fa7bc8\") " Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.245630 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/664db788-e1cc-494f-a331-ebc3e7fa7bc8-kube-api-access-s2wnp" (OuterVolumeSpecName: "kube-api-access-s2wnp") pod "664db788-e1cc-494f-a331-ebc3e7fa7bc8" (UID: "664db788-e1cc-494f-a331-ebc3e7fa7bc8"). InnerVolumeSpecName "kube-api-access-s2wnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.291790 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "664db788-e1cc-494f-a331-ebc3e7fa7bc8" (UID: "664db788-e1cc-494f-a331-ebc3e7fa7bc8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.295247 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-config" (OuterVolumeSpecName: "config") pod "664db788-e1cc-494f-a331-ebc3e7fa7bc8" (UID: "664db788-e1cc-494f-a331-ebc3e7fa7bc8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.340666 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2wnp\" (UniqueName: \"kubernetes.io/projected/664db788-e1cc-494f-a331-ebc3e7fa7bc8-kube-api-access-s2wnp\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.340712 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.340725 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/664db788-e1cc-494f-a331-ebc3e7fa7bc8-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.380365 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" event={"ID":"664db788-e1cc-494f-a331-ebc3e7fa7bc8","Type":"ContainerDied","Data":"b5ae889c54d9592113f50054db841a212c13723892e4776c1e844ebefc0a07e4"} Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.381209 4763 scope.go:117] "RemoveContainer" containerID="b8313d1fe233c398d086d63dc7a108e4d203705345bda81a12eb63b48fc1e61c" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.380413 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c69c9c9cf-rzlxr" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.383765 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc","Type":"ContainerStarted","Data":"c060c430334c5dd43b9a4030a655520edb5e2f28a18c5152d8256848ad4b6f23"} Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.383823 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e80cac8a-fa8e-4e8d-bc78-d3962d6921dc","Type":"ContainerStarted","Data":"9df8e406f84cdafb501abc29464df2668fd43d7d3573a862eb1614a18b0fdb72"} Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.384321 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.388172 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" event={"ID":"ecf53c07-1690-45db-91fe-1afe6fb21de6","Type":"ContainerStarted","Data":"58bacf47dedad5f6678f2b96accbd27b2e47f821c90944b6fe3b00c8f8effb18"} Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.388368 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.397407 4763 scope.go:117] "RemoveContainer" containerID="8fceb5dee96f9acd0be7de5fe00e3439e42f671032ccfcac2b49a6b2cb5d6e5e" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.416967 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.511720656 podStartE2EDuration="4.416946874s" podCreationTimestamp="2025-12-06 08:28:32 +0000 UTC" firstStartedPulling="2025-12-06 08:28:33.725579577 +0000 UTC m=+996.301284625" lastFinishedPulling="2025-12-06 08:28:35.630805805 +0000 UTC m=+998.206510843" observedRunningTime="2025-12-06 08:28:36.411821556 +0000 UTC m=+998.987526594" watchObservedRunningTime="2025-12-06 08:28:36.416946874 +0000 UTC 
m=+998.992651912" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.438732 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" podStartSLOduration=4.438712191 podStartE2EDuration="4.438712191s" podCreationTimestamp="2025-12-06 08:28:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:28:36.434860777 +0000 UTC m=+999.010565825" watchObservedRunningTime="2025-12-06 08:28:36.438712191 +0000 UTC m=+999.014417229" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.459987 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c69c9c9cf-rzlxr"] Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.467009 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c69c9c9cf-rzlxr"] Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.718162 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 06 08:28:36 crc kubenswrapper[4763]: I1206 08:28:36.846038 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 06 08:28:37 crc kubenswrapper[4763]: I1206 08:28:37.730455 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="664db788-e1cc-494f-a331-ebc3e7fa7bc8" path="/var/lib/kubelet/pods/664db788-e1cc-494f-a331-ebc3e7fa7bc8/volumes" Dec 06 08:28:40 crc kubenswrapper[4763]: I1206 08:28:40.971185 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 06 08:28:40 crc kubenswrapper[4763]: I1206 08:28:40.972757 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 06 08:28:41 crc kubenswrapper[4763]: I1206 08:28:41.212264 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 06 08:28:41 crc kubenswrapper[4763]: I1206 08:28:41.440090 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d80731d8-91ee-438d-a5c8-da9d36005e6f","Type":"ContainerStarted","Data":"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc"} Dec 06 08:28:41 crc kubenswrapper[4763]: I1206 08:28:41.558739 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.233106 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:42 crc kubenswrapper[4763]: E1206 08:28:42.233317 4763 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 06 08:28:42 crc kubenswrapper[4763]: E1206 08:28:42.233354 4763 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 06 08:28:42 crc kubenswrapper[4763]: E1206 08:28:42.233421 4763 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift podName:df22632a-c5cb-4636-abfe-48f60e1df901 nodeName:}" failed. 
No retries permitted until 2025-12-06 08:28:58.233400859 +0000 UTC m=+1020.809105907 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift") pod "swift-storage-0" (UID: "df22632a-c5cb-4636-abfe-48f60e1df901") : configmap "swift-ring-files" not found Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.334728 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5c8f-account-create-update-cr74m"] Dec 06 08:28:42 crc kubenswrapper[4763]: E1206 08:28:42.335133 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="664db788-e1cc-494f-a331-ebc3e7fa7bc8" containerName="dnsmasq-dns" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.335169 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="664db788-e1cc-494f-a331-ebc3e7fa7bc8" containerName="dnsmasq-dns" Dec 06 08:28:42 crc kubenswrapper[4763]: E1206 08:28:42.335187 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="664db788-e1cc-494f-a331-ebc3e7fa7bc8" containerName="init" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.335194 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="664db788-e1cc-494f-a331-ebc3e7fa7bc8" containerName="init" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.335361 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="664db788-e1cc-494f-a331-ebc3e7fa7bc8" containerName="dnsmasq-dns" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.335922 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5c8f-account-create-update-cr74m" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.338846 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.350294 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-lkfrr"] Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.351836 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-lkfrr" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.364004 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-lkfrr"] Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.375214 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5c8f-account-create-update-cr74m"] Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.439094 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q26rg\" (UniqueName: \"kubernetes.io/projected/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-kube-api-access-q26rg\") pod \"keystone-5c8f-account-create-update-cr74m\" (UID: \"cb37a078-cd5e-4ca9-aac8-068f790e3a5a\") " pod="openstack/keystone-5c8f-account-create-update-cr74m" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.439456 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7728e4ef-8ef4-414c-9cd9-274b386d59bb-operator-scripts\") pod \"keystone-db-create-lkfrr\" (UID: \"7728e4ef-8ef4-414c-9cd9-274b386d59bb\") " pod="openstack/keystone-db-create-lkfrr" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.439610 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc8hx\" (UniqueName: \"kubernetes.io/projected/7728e4ef-8ef4-414c-9cd9-274b386d59bb-kube-api-access-jc8hx\") pod \"keystone-db-create-lkfrr\" (UID: \"7728e4ef-8ef4-414c-9cd9-274b386d59bb\") " pod="openstack/keystone-db-create-lkfrr" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.439650 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-operator-scripts\") pod \"keystone-5c8f-account-create-update-cr74m\" (UID: \"cb37a078-cd5e-4ca9-aac8-068f790e3a5a\") " pod="openstack/keystone-5c8f-account-create-update-cr74m" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.542274 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-thw8d"] Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.550990 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-operator-scripts\") pod \"keystone-5c8f-account-create-update-cr74m\" (UID: \"cb37a078-cd5e-4ca9-aac8-068f790e3a5a\") " pod="openstack/keystone-5c8f-account-create-update-cr74m" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.551625 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q26rg\" (UniqueName: \"kubernetes.io/projected/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-kube-api-access-q26rg\") pod \"keystone-5c8f-account-create-update-cr74m\" (UID: \"cb37a078-cd5e-4ca9-aac8-068f790e3a5a\") " pod="openstack/keystone-5c8f-account-create-update-cr74m" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.551688 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7728e4ef-8ef4-414c-9cd9-274b386d59bb-operator-scripts\") pod \"keystone-db-create-lkfrr\" (UID: \"7728e4ef-8ef4-414c-9cd9-274b386d59bb\") " pod="openstack/keystone-db-create-lkfrr" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 
08:28:42.551828 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc8hx\" (UniqueName: \"kubernetes.io/projected/7728e4ef-8ef4-414c-9cd9-274b386d59bb-kube-api-access-jc8hx\") pod \"keystone-db-create-lkfrr\" (UID: \"7728e4ef-8ef4-414c-9cd9-274b386d59bb\") " pod="openstack/keystone-db-create-lkfrr" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.552843 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-thw8d" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.554712 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7728e4ef-8ef4-414c-9cd9-274b386d59bb-operator-scripts\") pod \"keystone-db-create-lkfrr\" (UID: \"7728e4ef-8ef4-414c-9cd9-274b386d59bb\") " pod="openstack/keystone-db-create-lkfrr" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.559692 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-operator-scripts\") pod \"keystone-5c8f-account-create-update-cr74m\" (UID: \"cb37a078-cd5e-4ca9-aac8-068f790e3a5a\") " pod="openstack/keystone-5c8f-account-create-update-cr74m" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.595134 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-thw8d"] Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.644713 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-40e8-account-create-update-hpdth"] Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.646144 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-40e8-account-create-update-hpdth" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.648813 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.653735 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-40e8-account-create-update-hpdth"] Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.690024 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-operator-scripts\") pod \"placement-db-create-thw8d\" (UID: \"35fd0671-e6bc-429c-a2e5-2f7757c7cda4\") " pod="openstack/placement-db-create-thw8d" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.690596 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq7k7\" (UniqueName: \"kubernetes.io/projected/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-kube-api-access-mq7k7\") pod \"placement-40e8-account-create-update-hpdth\" (UID: \"1447b4d5-1e23-43a9-9877-7e5ed71f3c72\") " pod="openstack/placement-40e8-account-create-update-hpdth" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.691006 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh8jn\" (UniqueName: \"kubernetes.io/projected/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-kube-api-access-gh8jn\") pod \"placement-db-create-thw8d\" (UID: \"35fd0671-e6bc-429c-a2e5-2f7757c7cda4\") " pod="openstack/placement-db-create-thw8d" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.691255 4763 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-operator-scripts\") pod \"placement-40e8-account-create-update-hpdth\" (UID: \"1447b4d5-1e23-43a9-9877-7e5ed71f3c72\") " pod="openstack/placement-40e8-account-create-update-hpdth" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.768757 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc8hx\" (UniqueName: \"kubernetes.io/projected/7728e4ef-8ef4-414c-9cd9-274b386d59bb-kube-api-access-jc8hx\") pod \"keystone-db-create-lkfrr\" (UID: \"7728e4ef-8ef4-414c-9cd9-274b386d59bb\") " pod="openstack/keystone-db-create-lkfrr" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.778448 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q26rg\" (UniqueName: \"kubernetes.io/projected/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-kube-api-access-q26rg\") pod \"keystone-5c8f-account-create-update-cr74m\" (UID: \"cb37a078-cd5e-4ca9-aac8-068f790e3a5a\") " pod="openstack/keystone-5c8f-account-create-update-cr74m" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.804008 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-operator-scripts\") pod \"placement-db-create-thw8d\" (UID: \"35fd0671-e6bc-429c-a2e5-2f7757c7cda4\") " pod="openstack/placement-db-create-thw8d" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.804067 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq7k7\" (UniqueName: \"kubernetes.io/projected/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-kube-api-access-mq7k7\") pod \"placement-40e8-account-create-update-hpdth\" (UID: \"1447b4d5-1e23-43a9-9877-7e5ed71f3c72\") " pod="openstack/placement-40e8-account-create-update-hpdth" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.804197 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh8jn\" (UniqueName: \"kubernetes.io/projected/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-kube-api-access-gh8jn\") pod \"placement-db-create-thw8d\" (UID: \"35fd0671-e6bc-429c-a2e5-2f7757c7cda4\") " pod="openstack/placement-db-create-thw8d" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.804281 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-operator-scripts\") pod \"placement-40e8-account-create-update-hpdth\" (UID: \"1447b4d5-1e23-43a9-9877-7e5ed71f3c72\") " pod="openstack/placement-40e8-account-create-update-hpdth" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.805063 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-operator-scripts\") pod \"placement-40e8-account-create-update-hpdth\" (UID: \"1447b4d5-1e23-43a9-9877-7e5ed71f3c72\") " pod="openstack/placement-40e8-account-create-update-hpdth" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.805621 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-operator-scripts\") pod \"placement-db-create-thw8d\" (UID: 
\"35fd0671-e6bc-429c-a2e5-2f7757c7cda4\") " pod="openstack/placement-db-create-thw8d" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.844781 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq7k7\" (UniqueName: \"kubernetes.io/projected/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-kube-api-access-mq7k7\") pod \"placement-40e8-account-create-update-hpdth\" (UID: \"1447b4d5-1e23-43a9-9877-7e5ed71f3c72\") " pod="openstack/placement-40e8-account-create-update-hpdth" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.871001 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh8jn\" (UniqueName: \"kubernetes.io/projected/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-kube-api-access-gh8jn\") pod \"placement-db-create-thw8d\" (UID: \"35fd0671-e6bc-429c-a2e5-2f7757c7cda4\") " pod="openstack/placement-db-create-thw8d" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.899741 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-thw8d" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.956328 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5c8f-account-create-update-cr74m" Dec 06 08:28:42 crc kubenswrapper[4763]: I1206 08:28:42.976272 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-lkfrr" Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.015456 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-40e8-account-create-update-hpdth" Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.105068 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.182344 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fc86f595f-rr6mx"] Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.193480 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" podUID="293062e3-bba3-4bb9-a750-586c7285d5b1" containerName="dnsmasq-dns" containerID="cri-o://1594cb17e4d0dcba8bc61d42079f694ba08d20fb3c8461e9d4dc67a4a2f7e5f3" gracePeriod=10 Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.408450 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-thw8d"] Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.457092 4763 generic.go:334] "Generic (PLEG): container finished" podID="e74906c2-6446-4cb0-a428-61609a969406" containerID="2c3b1f9f0a95e111462f2c1fd2c1a26ea4cb7cdf38cd9a08c576b1998d8e176e" exitCode=0 Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.457185 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-crchz" event={"ID":"e74906c2-6446-4cb0-a428-61609a969406","Type":"ContainerDied","Data":"2c3b1f9f0a95e111462f2c1fd2c1a26ea4cb7cdf38cd9a08c576b1998d8e176e"} Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.458717 4763 generic.go:334] "Generic (PLEG): container finished" podID="293062e3-bba3-4bb9-a750-586c7285d5b1" containerID="1594cb17e4d0dcba8bc61d42079f694ba08d20fb3c8461e9d4dc67a4a2f7e5f3" exitCode=0 Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.459473 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" 
event={"ID":"293062e3-bba3-4bb9-a750-586c7285d5b1","Type":"ContainerDied","Data":"1594cb17e4d0dcba8bc61d42079f694ba08d20fb3c8461e9d4dc67a4a2f7e5f3"} Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.564182 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5c8f-account-create-update-cr74m"] Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.573472 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-lkfrr"] Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.714180 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:28:43 crc kubenswrapper[4763]: W1206 08:28:43.825969 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1447b4d5_1e23_43a9_9877_7e5ed71f3c72.slice/crio-975255a21abed4d8ddb12854dd1d416e491d56bd12083e0a59932c61b15eda88 WatchSource:0}: Error finding container 975255a21abed4d8ddb12854dd1d416e491d56bd12083e0a59932c61b15eda88: Status 404 returned error can't find the container with id 975255a21abed4d8ddb12854dd1d416e491d56bd12083e0a59932c61b15eda88 Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.833047 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjfln\" (UniqueName: \"kubernetes.io/projected/293062e3-bba3-4bb9-a750-586c7285d5b1-kube-api-access-fjfln\") pod \"293062e3-bba3-4bb9-a750-586c7285d5b1\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.833162 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-config\") pod \"293062e3-bba3-4bb9-a750-586c7285d5b1\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.833277 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-dns-svc\") pod \"293062e3-bba3-4bb9-a750-586c7285d5b1\" (UID: \"293062e3-bba3-4bb9-a750-586c7285d5b1\") " Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.838259 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-40e8-account-create-update-hpdth"] Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.865396 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/293062e3-bba3-4bb9-a750-586c7285d5b1-kube-api-access-fjfln" (OuterVolumeSpecName: "kube-api-access-fjfln") pod "293062e3-bba3-4bb9-a750-586c7285d5b1" (UID: "293062e3-bba3-4bb9-a750-586c7285d5b1"). InnerVolumeSpecName "kube-api-access-fjfln". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.889562 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-config" (OuterVolumeSpecName: "config") pod "293062e3-bba3-4bb9-a750-586c7285d5b1" (UID: "293062e3-bba3-4bb9-a750-586c7285d5b1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.890777 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "293062e3-bba3-4bb9-a750-586c7285d5b1" (UID: "293062e3-bba3-4bb9-a750-586c7285d5b1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.935108 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjfln\" (UniqueName: \"kubernetes.io/projected/293062e3-bba3-4bb9-a750-586c7285d5b1-kube-api-access-fjfln\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.935148 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:43 crc kubenswrapper[4763]: I1206 08:28:43.935160 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/293062e3-bba3-4bb9-a750-586c7285d5b1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.467058 4763 generic.go:334] "Generic (PLEG): container finished" podID="1447b4d5-1e23-43a9-9877-7e5ed71f3c72" containerID="806d2244100fa5497cbafd99d48d832c9cd284895e9dc7f0cefc187c47fc7f35" exitCode=0 Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.467302 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-40e8-account-create-update-hpdth" event={"ID":"1447b4d5-1e23-43a9-9877-7e5ed71f3c72","Type":"ContainerDied","Data":"806d2244100fa5497cbafd99d48d832c9cd284895e9dc7f0cefc187c47fc7f35"} Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.467379 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-40e8-account-create-update-hpdth" event={"ID":"1447b4d5-1e23-43a9-9877-7e5ed71f3c72","Type":"ContainerStarted","Data":"975255a21abed4d8ddb12854dd1d416e491d56bd12083e0a59932c61b15eda88"} Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.470949 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" event={"ID":"293062e3-bba3-4bb9-a750-586c7285d5b1","Type":"ContainerDied","Data":"b3fd8e2bc1b353a3ca2827ec6e53da58c546424c0c1f2230e2a1a6264c79e758"} Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.470987 4763 scope.go:117] "RemoveContainer" containerID="1594cb17e4d0dcba8bc61d42079f694ba08d20fb3c8461e9d4dc67a4a2f7e5f3" Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.471099 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fc86f595f-rr6mx" Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.473410 4763 generic.go:334] "Generic (PLEG): container finished" podID="7728e4ef-8ef4-414c-9cd9-274b386d59bb" containerID="3edfb35313457a44f4e0fe1f1a6a82372499f4f074dcf5cf167a3d4b3f2891fa" exitCode=0 Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.473477 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-lkfrr" event={"ID":"7728e4ef-8ef4-414c-9cd9-274b386d59bb","Type":"ContainerDied","Data":"3edfb35313457a44f4e0fe1f1a6a82372499f4f074dcf5cf167a3d4b3f2891fa"} Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.473507 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-lkfrr" event={"ID":"7728e4ef-8ef4-414c-9cd9-274b386d59bb","Type":"ContainerStarted","Data":"1851913fd66dac182062ece9b3d52d217ec34d347afbb4cbd048c9316a5b6554"} Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.474691 4763 generic.go:334] "Generic (PLEG): container finished" podID="35fd0671-e6bc-429c-a2e5-2f7757c7cda4" containerID="3d89cc63d31fb14cc2f1250eee02dfc4c5759af8f928fca3a1c284cb17637b0e" exitCode=0 Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.474735 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-thw8d" event={"ID":"35fd0671-e6bc-429c-a2e5-2f7757c7cda4","Type":"ContainerDied","Data":"3d89cc63d31fb14cc2f1250eee02dfc4c5759af8f928fca3a1c284cb17637b0e"} Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.474753 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-thw8d" event={"ID":"35fd0671-e6bc-429c-a2e5-2f7757c7cda4","Type":"ContainerStarted","Data":"fc77fe1dda69ed1d2ff9eaa9d4c190701e032e72fd41fd00dda4c25a936de9e5"} Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.476607 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d80731d8-91ee-438d-a5c8-da9d36005e6f","Type":"ContainerStarted","Data":"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d"} Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.477674 4763 generic.go:334] "Generic (PLEG): container finished" podID="cb37a078-cd5e-4ca9-aac8-068f790e3a5a" containerID="b3315cec3c6162ea65cd93ee402ec90d5bede19a1224465d073abfdafaafe894" exitCode=0 Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.477877 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5c8f-account-create-update-cr74m" event={"ID":"cb37a078-cd5e-4ca9-aac8-068f790e3a5a","Type":"ContainerDied","Data":"b3315cec3c6162ea65cd93ee402ec90d5bede19a1224465d073abfdafaafe894"} Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.477937 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5c8f-account-create-update-cr74m" event={"ID":"cb37a078-cd5e-4ca9-aac8-068f790e3a5a","Type":"ContainerStarted","Data":"770352ee495cf9410f431def07a7efa9af15e0d7e7ed5f281e0a5e884912c0d4"} Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.516858 4763 scope.go:117] "RemoveContainer" containerID="c1852ac14b3439d451dbe63838c82dd9db7981907472d40943436e4be0f89712" Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.526196 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fc86f595f-rr6mx"] Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.540763 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fc86f595f-rr6mx"] 
Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.769790 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.951142 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-ring-data-devices\") pod \"e74906c2-6446-4cb0-a428-61609a969406\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.951460 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-scripts\") pod \"e74906c2-6446-4cb0-a428-61609a969406\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.951531 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-dispersionconf\") pod \"e74906c2-6446-4cb0-a428-61609a969406\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.951614 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e74906c2-6446-4cb0-a428-61609a969406-etc-swift\") pod \"e74906c2-6446-4cb0-a428-61609a969406\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.951678 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-swiftconf\") pod \"e74906c2-6446-4cb0-a428-61609a969406\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.951706 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxj6s\" (UniqueName: \"kubernetes.io/projected/e74906c2-6446-4cb0-a428-61609a969406-kube-api-access-nxj6s\") pod \"e74906c2-6446-4cb0-a428-61609a969406\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.951748 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-combined-ca-bundle\") pod \"e74906c2-6446-4cb0-a428-61609a969406\" (UID: \"e74906c2-6446-4cb0-a428-61609a969406\") " Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.952127 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "e74906c2-6446-4cb0-a428-61609a969406" (UID: "e74906c2-6446-4cb0-a428-61609a969406"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.953078 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e74906c2-6446-4cb0-a428-61609a969406-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "e74906c2-6446-4cb0-a428-61609a969406" (UID: "e74906c2-6446-4cb0-a428-61609a969406"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.957510 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e74906c2-6446-4cb0-a428-61609a969406-kube-api-access-nxj6s" (OuterVolumeSpecName: "kube-api-access-nxj6s") pod "e74906c2-6446-4cb0-a428-61609a969406" (UID: "e74906c2-6446-4cb0-a428-61609a969406"). InnerVolumeSpecName "kube-api-access-nxj6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.959865 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "e74906c2-6446-4cb0-a428-61609a969406" (UID: "e74906c2-6446-4cb0-a428-61609a969406"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.979358 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "e74906c2-6446-4cb0-a428-61609a969406" (UID: "e74906c2-6446-4cb0-a428-61609a969406"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.981881 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-scripts" (OuterVolumeSpecName: "scripts") pod "e74906c2-6446-4cb0-a428-61609a969406" (UID: "e74906c2-6446-4cb0-a428-61609a969406"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:44 crc kubenswrapper[4763]: I1206 08:28:44.983020 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e74906c2-6446-4cb0-a428-61609a969406" (UID: "e74906c2-6446-4cb0-a428-61609a969406"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.053814 4763 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e74906c2-6446-4cb0-a428-61609a969406-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.053857 4763 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.053871 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxj6s\" (UniqueName: \"kubernetes.io/projected/e74906c2-6446-4cb0-a428-61609a969406-kube-api-access-nxj6s\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.053886 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.053914 4763 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.053928 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e74906c2-6446-4cb0-a428-61609a969406-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.053939 4763 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e74906c2-6446-4cb0-a428-61609a969406-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.135453 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-create-xqr8s"] Dec 06 08:28:45 crc kubenswrapper[4763]: E1206 08:28:45.135798 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="293062e3-bba3-4bb9-a750-586c7285d5b1" containerName="init" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.135816 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="293062e3-bba3-4bb9-a750-586c7285d5b1" containerName="init" Dec 06 08:28:45 crc kubenswrapper[4763]: E1206 08:28:45.135835 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e74906c2-6446-4cb0-a428-61609a969406" containerName="swift-ring-rebalance" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.135841 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e74906c2-6446-4cb0-a428-61609a969406" containerName="swift-ring-rebalance" Dec 06 08:28:45 crc kubenswrapper[4763]: E1206 08:28:45.135853 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="293062e3-bba3-4bb9-a750-586c7285d5b1" containerName="dnsmasq-dns" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.135859 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="293062e3-bba3-4bb9-a750-586c7285d5b1" containerName="dnsmasq-dns" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.136024 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e74906c2-6446-4cb0-a428-61609a969406" containerName="swift-ring-rebalance" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.136066 4763 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="293062e3-bba3-4bb9-a750-586c7285d5b1" containerName="dnsmasq-dns" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.136596 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-xqr8s" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.151882 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-xqr8s"] Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.227139 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-8ebe-account-create-update-b574x"] Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.228940 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-8ebe-account-create-update-b574x" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.231085 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-db-secret" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.245963 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-8ebe-account-create-update-b574x"] Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.258627 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2c98\" (UniqueName: \"kubernetes.io/projected/92a312a1-7032-4fa4-b14a-0874d22ac4ee-kube-api-access-p2c98\") pod \"watcher-db-create-xqr8s\" (UID: \"92a312a1-7032-4fa4-b14a-0874d22ac4ee\") " pod="openstack/watcher-db-create-xqr8s" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.258763 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92a312a1-7032-4fa4-b14a-0874d22ac4ee-operator-scripts\") pod \"watcher-db-create-xqr8s\" (UID: \"92a312a1-7032-4fa4-b14a-0874d22ac4ee\") " pod="openstack/watcher-db-create-xqr8s" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.360727 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p75mh\" (UniqueName: \"kubernetes.io/projected/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-kube-api-access-p75mh\") pod \"watcher-8ebe-account-create-update-b574x\" (UID: \"e2355f72-6850-4c5e-ae74-d0525a8bd9e3\") " pod="openstack/watcher-8ebe-account-create-update-b574x" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.360804 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2c98\" (UniqueName: \"kubernetes.io/projected/92a312a1-7032-4fa4-b14a-0874d22ac4ee-kube-api-access-p2c98\") pod \"watcher-db-create-xqr8s\" (UID: \"92a312a1-7032-4fa4-b14a-0874d22ac4ee\") " pod="openstack/watcher-db-create-xqr8s" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.360827 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-operator-scripts\") pod \"watcher-8ebe-account-create-update-b574x\" (UID: \"e2355f72-6850-4c5e-ae74-d0525a8bd9e3\") " pod="openstack/watcher-8ebe-account-create-update-b574x" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.360882 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92a312a1-7032-4fa4-b14a-0874d22ac4ee-operator-scripts\") pod \"watcher-db-create-xqr8s\" (UID: 
\"92a312a1-7032-4fa4-b14a-0874d22ac4ee\") " pod="openstack/watcher-db-create-xqr8s" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.361559 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92a312a1-7032-4fa4-b14a-0874d22ac4ee-operator-scripts\") pod \"watcher-db-create-xqr8s\" (UID: \"92a312a1-7032-4fa4-b14a-0874d22ac4ee\") " pod="openstack/watcher-db-create-xqr8s" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.393123 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2c98\" (UniqueName: \"kubernetes.io/projected/92a312a1-7032-4fa4-b14a-0874d22ac4ee-kube-api-access-p2c98\") pod \"watcher-db-create-xqr8s\" (UID: \"92a312a1-7032-4fa4-b14a-0874d22ac4ee\") " pod="openstack/watcher-db-create-xqr8s" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.457563 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-xqr8s" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.462199 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p75mh\" (UniqueName: \"kubernetes.io/projected/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-kube-api-access-p75mh\") pod \"watcher-8ebe-account-create-update-b574x\" (UID: \"e2355f72-6850-4c5e-ae74-d0525a8bd9e3\") " pod="openstack/watcher-8ebe-account-create-update-b574x" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.462263 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-operator-scripts\") pod \"watcher-8ebe-account-create-update-b574x\" (UID: \"e2355f72-6850-4c5e-ae74-d0525a8bd9e3\") " pod="openstack/watcher-8ebe-account-create-update-b574x" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.463071 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-operator-scripts\") pod \"watcher-8ebe-account-create-update-b574x\" (UID: \"e2355f72-6850-4c5e-ae74-d0525a8bd9e3\") " pod="openstack/watcher-8ebe-account-create-update-b574x" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.479964 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p75mh\" (UniqueName: \"kubernetes.io/projected/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-kube-api-access-p75mh\") pod \"watcher-8ebe-account-create-update-b574x\" (UID: \"e2355f72-6850-4c5e-ae74-d0525a8bd9e3\") " pod="openstack/watcher-8ebe-account-create-update-b574x" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.493854 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-crchz" event={"ID":"e74906c2-6446-4cb0-a428-61609a969406","Type":"ContainerDied","Data":"33c2af4fcbed0a6061a8554a35e9873c32bf9b1ee881ed1f2f55a994b09a31d0"} Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.493893 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33c2af4fcbed0a6061a8554a35e9873c32bf9b1ee881ed1f2f55a994b09a31d0" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.493953 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-crchz" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.548140 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-8ebe-account-create-update-b574x" Dec 06 08:28:45 crc kubenswrapper[4763]: I1206 08:28:45.754931 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="293062e3-bba3-4bb9-a750-586c7285d5b1" path="/var/lib/kubelet/pods/293062e3-bba3-4bb9-a750-586c7285d5b1/volumes" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.138799 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-40e8-account-create-update-hpdth" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.149377 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-thw8d" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.277947 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-operator-scripts\") pod \"35fd0671-e6bc-429c-a2e5-2f7757c7cda4\" (UID: \"35fd0671-e6bc-429c-a2e5-2f7757c7cda4\") " Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.278052 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-operator-scripts\") pod \"1447b4d5-1e23-43a9-9877-7e5ed71f3c72\" (UID: \"1447b4d5-1e23-43a9-9877-7e5ed71f3c72\") " Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.278192 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq7k7\" (UniqueName: \"kubernetes.io/projected/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-kube-api-access-mq7k7\") pod \"1447b4d5-1e23-43a9-9877-7e5ed71f3c72\" (UID: \"1447b4d5-1e23-43a9-9877-7e5ed71f3c72\") " Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.278245 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh8jn\" (UniqueName: \"kubernetes.io/projected/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-kube-api-access-gh8jn\") pod \"35fd0671-e6bc-429c-a2e5-2f7757c7cda4\" (UID: \"35fd0671-e6bc-429c-a2e5-2f7757c7cda4\") " Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.279111 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1447b4d5-1e23-43a9-9877-7e5ed71f3c72" (UID: "1447b4d5-1e23-43a9-9877-7e5ed71f3c72"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.279135 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "35fd0671-e6bc-429c-a2e5-2f7757c7cda4" (UID: "35fd0671-e6bc-429c-a2e5-2f7757c7cda4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.283121 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-kube-api-access-mq7k7" (OuterVolumeSpecName: "kube-api-access-mq7k7") pod "1447b4d5-1e23-43a9-9877-7e5ed71f3c72" (UID: "1447b4d5-1e23-43a9-9877-7e5ed71f3c72"). InnerVolumeSpecName "kube-api-access-mq7k7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.284982 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-kube-api-access-gh8jn" (OuterVolumeSpecName: "kube-api-access-gh8jn") pod "35fd0671-e6bc-429c-a2e5-2f7757c7cda4" (UID: "35fd0671-e6bc-429c-a2e5-2f7757c7cda4"). InnerVolumeSpecName "kube-api-access-gh8jn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.307545 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-lkfrr" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.311657 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-8ebe-account-create-update-b574x"] Dec 06 08:28:46 crc kubenswrapper[4763]: W1206 08:28:46.317718 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2355f72_6850_4c5e_ae74_d0525a8bd9e3.slice/crio-a967d53a3a62a0707f6733c9493935153d070dbfeb31faed78b91a74e3483db2 WatchSource:0}: Error finding container a967d53a3a62a0707f6733c9493935153d070dbfeb31faed78b91a74e3483db2: Status 404 returned error can't find the container with id a967d53a3a62a0707f6733c9493935153d070dbfeb31faed78b91a74e3483db2 Dec 06 08:28:46 crc kubenswrapper[4763]: W1206 08:28:46.323390 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92a312a1_7032_4fa4_b14a_0874d22ac4ee.slice/crio-27dace2225618a7a64e2a535e1c89a1e038a0f13dd7024213b25cf47a37246fe WatchSource:0}: Error finding container 27dace2225618a7a64e2a535e1c89a1e038a0f13dd7024213b25cf47a37246fe: Status 404 returned error can't find the container with id 27dace2225618a7a64e2a535e1c89a1e038a0f13dd7024213b25cf47a37246fe Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.323682 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5c8f-account-create-update-cr74m" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.327858 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-xqr8s"] Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.380443 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.380485 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.380498 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq7k7\" (UniqueName: \"kubernetes.io/projected/1447b4d5-1e23-43a9-9877-7e5ed71f3c72-kube-api-access-mq7k7\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.380512 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh8jn\" (UniqueName: \"kubernetes.io/projected/35fd0671-e6bc-429c-a2e5-2f7757c7cda4-kube-api-access-gh8jn\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.481186 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7728e4ef-8ef4-414c-9cd9-274b386d59bb-operator-scripts\") pod \"7728e4ef-8ef4-414c-9cd9-274b386d59bb\" (UID: \"7728e4ef-8ef4-414c-9cd9-274b386d59bb\") " Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.481222 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-operator-scripts\") pod \"cb37a078-cd5e-4ca9-aac8-068f790e3a5a\" (UID: \"cb37a078-cd5e-4ca9-aac8-068f790e3a5a\") " Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.481257 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q26rg\" (UniqueName: \"kubernetes.io/projected/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-kube-api-access-q26rg\") pod \"cb37a078-cd5e-4ca9-aac8-068f790e3a5a\" (UID: \"cb37a078-cd5e-4ca9-aac8-068f790e3a5a\") " Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.481355 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc8hx\" (UniqueName: \"kubernetes.io/projected/7728e4ef-8ef4-414c-9cd9-274b386d59bb-kube-api-access-jc8hx\") pod \"7728e4ef-8ef4-414c-9cd9-274b386d59bb\" (UID: \"7728e4ef-8ef4-414c-9cd9-274b386d59bb\") " Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.482107 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cb37a078-cd5e-4ca9-aac8-068f790e3a5a" (UID: "cb37a078-cd5e-4ca9-aac8-068f790e3a5a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.485423 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-kube-api-access-q26rg" (OuterVolumeSpecName: "kube-api-access-q26rg") pod "cb37a078-cd5e-4ca9-aac8-068f790e3a5a" (UID: "cb37a078-cd5e-4ca9-aac8-068f790e3a5a"). InnerVolumeSpecName "kube-api-access-q26rg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.486543 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7728e4ef-8ef4-414c-9cd9-274b386d59bb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7728e4ef-8ef4-414c-9cd9-274b386d59bb" (UID: "7728e4ef-8ef4-414c-9cd9-274b386d59bb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.486549 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7728e4ef-8ef4-414c-9cd9-274b386d59bb-kube-api-access-jc8hx" (OuterVolumeSpecName: "kube-api-access-jc8hx") pod "7728e4ef-8ef4-414c-9cd9-274b386d59bb" (UID: "7728e4ef-8ef4-414c-9cd9-274b386d59bb"). InnerVolumeSpecName "kube-api-access-jc8hx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.521846 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-thw8d" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.522014 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-thw8d" event={"ID":"35fd0671-e6bc-429c-a2e5-2f7757c7cda4","Type":"ContainerDied","Data":"fc77fe1dda69ed1d2ff9eaa9d4c190701e032e72fd41fd00dda4c25a936de9e5"} Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.522057 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc77fe1dda69ed1d2ff9eaa9d4c190701e032e72fd41fd00dda4c25a936de9e5" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.525279 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5c8f-account-create-update-cr74m" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.525291 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5c8f-account-create-update-cr74m" event={"ID":"cb37a078-cd5e-4ca9-aac8-068f790e3a5a","Type":"ContainerDied","Data":"770352ee495cf9410f431def07a7efa9af15e0d7e7ed5f281e0a5e884912c0d4"} Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.525353 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="770352ee495cf9410f431def07a7efa9af15e0d7e7ed5f281e0a5e884912c0d4" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.527007 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-40e8-account-create-update-hpdth" event={"ID":"1447b4d5-1e23-43a9-9877-7e5ed71f3c72","Type":"ContainerDied","Data":"975255a21abed4d8ddb12854dd1d416e491d56bd12083e0a59932c61b15eda88"} Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.527058 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="975255a21abed4d8ddb12854dd1d416e491d56bd12083e0a59932c61b15eda88" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.527028 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-40e8-account-create-update-hpdth" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.528317 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-xqr8s" event={"ID":"92a312a1-7032-4fa4-b14a-0874d22ac4ee","Type":"ContainerStarted","Data":"3b15c58d58c885565088ed306767d38f556e333a2d8260d50a63aa1bc9fee519"} Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.528359 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-xqr8s" event={"ID":"92a312a1-7032-4fa4-b14a-0874d22ac4ee","Type":"ContainerStarted","Data":"27dace2225618a7a64e2a535e1c89a1e038a0f13dd7024213b25cf47a37246fe"} Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.530560 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-8ebe-account-create-update-b574x" event={"ID":"e2355f72-6850-4c5e-ae74-d0525a8bd9e3","Type":"ContainerStarted","Data":"a967d53a3a62a0707f6733c9493935153d070dbfeb31faed78b91a74e3483db2"} Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.532065 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-lkfrr" event={"ID":"7728e4ef-8ef4-414c-9cd9-274b386d59bb","Type":"ContainerDied","Data":"1851913fd66dac182062ece9b3d52d217ec34d347afbb4cbd048c9316a5b6554"} Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.532093 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1851913fd66dac182062ece9b3d52d217ec34d347afbb4cbd048c9316a5b6554" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.532113 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-lkfrr" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.550644 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-create-xqr8s" podStartSLOduration=1.5506183789999999 podStartE2EDuration="1.550618379s" podCreationTimestamp="2025-12-06 08:28:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:28:46.547029122 +0000 UTC m=+1009.122734180" watchObservedRunningTime="2025-12-06 08:28:46.550618379 +0000 UTC m=+1009.126323427" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.583937 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7728e4ef-8ef4-414c-9cd9-274b386d59bb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.583972 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.583986 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q26rg\" (UniqueName: \"kubernetes.io/projected/cb37a078-cd5e-4ca9-aac8-068f790e3a5a-kube-api-access-q26rg\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:46 crc kubenswrapper[4763]: I1206 08:28:46.584000 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc8hx\" (UniqueName: \"kubernetes.io/projected/7728e4ef-8ef4-414c-9cd9-274b386d59bb-kube-api-access-jc8hx\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:47 crc kubenswrapper[4763]: I1206 08:28:47.173395 4763 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/watcher-8ebe-account-create-update-b574x" podStartSLOduration=2.173372636 podStartE2EDuration="2.173372636s" podCreationTimestamp="2025-12-06 08:28:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:28:46.565110761 +0000 UTC m=+1009.140815789" watchObservedRunningTime="2025-12-06 08:28:47.173372636 +0000 UTC m=+1009.749077674" Dec 06 08:28:47 crc kubenswrapper[4763]: I1206 08:28:47.540565 4763 generic.go:334] "Generic (PLEG): container finished" podID="92a312a1-7032-4fa4-b14a-0874d22ac4ee" containerID="3b15c58d58c885565088ed306767d38f556e333a2d8260d50a63aa1bc9fee519" exitCode=0 Dec 06 08:28:47 crc kubenswrapper[4763]: I1206 08:28:47.541566 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-xqr8s" event={"ID":"92a312a1-7032-4fa4-b14a-0874d22ac4ee","Type":"ContainerDied","Data":"3b15c58d58c885565088ed306767d38f556e333a2d8260d50a63aa1bc9fee519"} Dec 06 08:28:47 crc kubenswrapper[4763]: I1206 08:28:47.543841 4763 generic.go:334] "Generic (PLEG): container finished" podID="e2355f72-6850-4c5e-ae74-d0525a8bd9e3" containerID="f63919fa96a37f34a635bc08d1a30da474a7ebf0eb7f03cca87ec7e9244be44f" exitCode=0 Dec 06 08:28:47 crc kubenswrapper[4763]: I1206 08:28:47.543880 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-8ebe-account-create-update-b574x" event={"ID":"e2355f72-6850-4c5e-ae74-d0525a8bd9e3","Type":"ContainerDied","Data":"f63919fa96a37f34a635bc08d1a30da474a7ebf0eb7f03cca87ec7e9244be44f"} Dec 06 08:28:48 crc kubenswrapper[4763]: I1206 08:28:48.131624 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 06 08:28:48 crc kubenswrapper[4763]: I1206 08:28:48.215747 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-mvnv6" podUID="abd10dfb-5dd9-4271-94aa-60b8fed4ba2b" containerName="ovn-controller" probeResult="failure" output=< Dec 06 08:28:48 crc kubenswrapper[4763]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 06 08:28:48 crc kubenswrapper[4763]: > Dec 06 08:28:48 crc kubenswrapper[4763]: I1206 08:28:48.557768 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d80731d8-91ee-438d-a5c8-da9d36005e6f","Type":"ContainerStarted","Data":"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea"} Dec 06 08:28:48 crc kubenswrapper[4763]: I1206 08:28:48.605179 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=13.25081643 podStartE2EDuration="53.605157019s" podCreationTimestamp="2025-12-06 08:27:55 +0000 UTC" firstStartedPulling="2025-12-06 08:28:07.550698305 +0000 UTC m=+970.126403343" lastFinishedPulling="2025-12-06 08:28:47.905038894 +0000 UTC m=+1010.480743932" observedRunningTime="2025-12-06 08:28:48.589761324 +0000 UTC m=+1011.165466402" watchObservedRunningTime="2025-12-06 08:28:48.605157019 +0000 UTC m=+1011.180862067" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.019479 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-8ebe-account-create-update-b574x" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.025959 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-xqr8s" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.130354 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-operator-scripts\") pod \"e2355f72-6850-4c5e-ae74-d0525a8bd9e3\" (UID: \"e2355f72-6850-4c5e-ae74-d0525a8bd9e3\") " Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.130454 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p2c98\" (UniqueName: \"kubernetes.io/projected/92a312a1-7032-4fa4-b14a-0874d22ac4ee-kube-api-access-p2c98\") pod \"92a312a1-7032-4fa4-b14a-0874d22ac4ee\" (UID: \"92a312a1-7032-4fa4-b14a-0874d22ac4ee\") " Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.130564 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p75mh\" (UniqueName: \"kubernetes.io/projected/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-kube-api-access-p75mh\") pod \"e2355f72-6850-4c5e-ae74-d0525a8bd9e3\" (UID: \"e2355f72-6850-4c5e-ae74-d0525a8bd9e3\") " Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.130585 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92a312a1-7032-4fa4-b14a-0874d22ac4ee-operator-scripts\") pod \"92a312a1-7032-4fa4-b14a-0874d22ac4ee\" (UID: \"92a312a1-7032-4fa4-b14a-0874d22ac4ee\") " Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.131374 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92a312a1-7032-4fa4-b14a-0874d22ac4ee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "92a312a1-7032-4fa4-b14a-0874d22ac4ee" (UID: "92a312a1-7032-4fa4-b14a-0874d22ac4ee"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.131620 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e2355f72-6850-4c5e-ae74-d0525a8bd9e3" (UID: "e2355f72-6850-4c5e-ae74-d0525a8bd9e3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.143790 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92a312a1-7032-4fa4-b14a-0874d22ac4ee-kube-api-access-p2c98" (OuterVolumeSpecName: "kube-api-access-p2c98") pod "92a312a1-7032-4fa4-b14a-0874d22ac4ee" (UID: "92a312a1-7032-4fa4-b14a-0874d22ac4ee"). InnerVolumeSpecName "kube-api-access-p2c98". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.155034 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-kube-api-access-p75mh" (OuterVolumeSpecName: "kube-api-access-p75mh") pod "e2355f72-6850-4c5e-ae74-d0525a8bd9e3" (UID: "e2355f72-6850-4c5e-ae74-d0525a8bd9e3"). InnerVolumeSpecName "kube-api-access-p75mh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.232712 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.232754 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p2c98\" (UniqueName: \"kubernetes.io/projected/92a312a1-7032-4fa4-b14a-0874d22ac4ee-kube-api-access-p2c98\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.232766 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p75mh\" (UniqueName: \"kubernetes.io/projected/e2355f72-6850-4c5e-ae74-d0525a8bd9e3-kube-api-access-p75mh\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.232775 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92a312a1-7032-4fa4-b14a-0874d22ac4ee-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.568045 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-xqr8s" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.568643 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-xqr8s" event={"ID":"92a312a1-7032-4fa4-b14a-0874d22ac4ee","Type":"ContainerDied","Data":"27dace2225618a7a64e2a535e1c89a1e038a0f13dd7024213b25cf47a37246fe"} Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.568726 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27dace2225618a7a64e2a535e1c89a1e038a0f13dd7024213b25cf47a37246fe" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.570312 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-8ebe-account-create-update-b574x" event={"ID":"e2355f72-6850-4c5e-ae74-d0525a8bd9e3","Type":"ContainerDied","Data":"a967d53a3a62a0707f6733c9493935153d070dbfeb31faed78b91a74e3483db2"} Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.570366 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a967d53a3a62a0707f6733c9493935153d070dbfeb31faed78b91a74e3483db2" Dec 06 08:28:49 crc kubenswrapper[4763]: I1206 08:28:49.570385 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-8ebe-account-create-update-b574x" Dec 06 08:28:50 crc kubenswrapper[4763]: I1206 08:28:50.615031 4763 generic.go:334] "Generic (PLEG): container finished" podID="e18a4dfa-5953-422a-be11-7ae83ab5ec09" containerID="95bde55144df899e1de9b7726ea5fd2c9861a0b97343ca6da1379a05e03d206e" exitCode=0 Dec 06 08:28:50 crc kubenswrapper[4763]: I1206 08:28:50.615117 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"e18a4dfa-5953-422a-be11-7ae83ab5ec09","Type":"ContainerDied","Data":"95bde55144df899e1de9b7726ea5fd2c9861a0b97343ca6da1379a05e03d206e"} Dec 06 08:28:50 crc kubenswrapper[4763]: I1206 08:28:50.617055 4763 generic.go:334] "Generic (PLEG): container finished" podID="95ce87d2-e5c0-41f4-948a-e78e26077c91" containerID="3b79be3dce95fdf97dc1243a8fffaa1bba79366e8c1a443fffc8ef3b3a25f3f0" exitCode=0 Dec 06 08:28:50 crc kubenswrapper[4763]: I1206 08:28:50.617087 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"95ce87d2-e5c0-41f4-948a-e78e26077c91","Type":"ContainerDied","Data":"3b79be3dce95fdf97dc1243a8fffaa1bba79366e8c1a443fffc8ef3b3a25f3f0"} Dec 06 08:28:51 crc kubenswrapper[4763]: I1206 08:28:51.497586 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 06 08:28:51 crc kubenswrapper[4763]: I1206 08:28:51.626041 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"95ce87d2-e5c0-41f4-948a-e78e26077c91","Type":"ContainerStarted","Data":"ae5bc594ad1b99faf621d1efacc68bfb3ff0154d9cc21ebf49cdbf64ae13b156"} Dec 06 08:28:51 crc kubenswrapper[4763]: I1206 08:28:51.626381 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 06 08:28:51 crc kubenswrapper[4763]: I1206 08:28:51.628644 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"e18a4dfa-5953-422a-be11-7ae83ab5ec09","Type":"ContainerStarted","Data":"e25d5986b2f05861c9bb24b4daddc9710882618febce54470521c6a772d19c99"} Dec 06 08:28:51 crc kubenswrapper[4763]: I1206 08:28:51.628822 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:28:51 crc kubenswrapper[4763]: I1206 08:28:51.657305 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=55.904755463 podStartE2EDuration="1m4.657288417s" podCreationTimestamp="2025-12-06 08:27:47 +0000 UTC" firstStartedPulling="2025-12-06 08:28:06.955760546 +0000 UTC m=+969.531465584" lastFinishedPulling="2025-12-06 08:28:15.7082935 +0000 UTC m=+978.283998538" observedRunningTime="2025-12-06 08:28:51.652801376 +0000 UTC m=+1014.228506424" watchObservedRunningTime="2025-12-06 08:28:51.657288417 +0000 UTC m=+1014.232993455" Dec 06 08:28:51 crc kubenswrapper[4763]: I1206 08:28:51.681264 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-notifications-server-0" podStartSLOduration=55.85800758 podStartE2EDuration="1m4.681248755s" podCreationTimestamp="2025-12-06 08:27:47 +0000 UTC" firstStartedPulling="2025-12-06 08:28:06.219742709 +0000 UTC m=+968.795447747" lastFinishedPulling="2025-12-06 08:28:15.042983884 +0000 UTC m=+977.618688922" observedRunningTime="2025-12-06 08:28:51.674553144 +0000 UTC m=+1014.250258182" watchObservedRunningTime="2025-12-06 
08:28:51.681248755 +0000 UTC m=+1014.256953793" Dec 06 08:28:53 crc kubenswrapper[4763]: I1206 08:28:53.210785 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-mvnv6" podUID="abd10dfb-5dd9-4271-94aa-60b8fed4ba2b" containerName="ovn-controller" probeResult="failure" output=< Dec 06 08:28:53 crc kubenswrapper[4763]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 06 08:28:53 crc kubenswrapper[4763]: > Dec 06 08:28:53 crc kubenswrapper[4763]: I1206 08:28:53.248444 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:28:53 crc kubenswrapper[4763]: I1206 08:28:53.645619 4763 generic.go:334] "Generic (PLEG): container finished" podID="2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" containerID="025330a3d2028eb40b7d062410c48bd3146ff2cd92275be82d0b7b00bf80c41c" exitCode=0 Dec 06 08:28:53 crc kubenswrapper[4763]: I1206 08:28:53.645677 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9","Type":"ContainerDied","Data":"025330a3d2028eb40b7d062410c48bd3146ff2cd92275be82d0b7b00bf80c41c"} Dec 06 08:28:54 crc kubenswrapper[4763]: I1206 08:28:54.655916 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9","Type":"ContainerStarted","Data":"92e7ea483e4f925d1daa0f9bfaf84cc0536f9ae19fb35ee4c514d7710912b8a5"} Dec 06 08:28:54 crc kubenswrapper[4763]: I1206 08:28:54.656631 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:28:54 crc kubenswrapper[4763]: I1206 08:28:54.688015 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=57.94989947 podStartE2EDuration="1m7.687995117s" podCreationTimestamp="2025-12-06 08:27:47 +0000 UTC" firstStartedPulling="2025-12-06 08:28:06.700105453 +0000 UTC m=+969.275810491" lastFinishedPulling="2025-12-06 08:28:16.4382011 +0000 UTC m=+979.013906138" observedRunningTime="2025-12-06 08:28:54.682317983 +0000 UTC m=+1017.258023041" watchObservedRunningTime="2025-12-06 08:28:54.687995117 +0000 UTC m=+1017.263700155" Dec 06 08:28:56 crc kubenswrapper[4763]: I1206 08:28:56.497469 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 06 08:28:56 crc kubenswrapper[4763]: I1206 08:28:56.499554 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 06 08:28:56 crc kubenswrapper[4763]: I1206 08:28:56.674318 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 06 08:28:58 crc kubenswrapper[4763]: I1206 08:28:58.210583 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-mvnv6" podUID="abd10dfb-5dd9-4271-94aa-60b8fed4ba2b" containerName="ovn-controller" probeResult="failure" output=< Dec 06 08:28:58 crc kubenswrapper[4763]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 06 08:28:58 crc kubenswrapper[4763]: > Dec 06 08:28:58 crc kubenswrapper[4763]: I1206 08:28:58.296706 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:58 crc kubenswrapper[4763]: I1206 08:28:58.303080 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/df22632a-c5cb-4636-abfe-48f60e1df901-etc-swift\") pod \"swift-storage-0\" (UID: \"df22632a-c5cb-4636-abfe-48f60e1df901\") " pod="openstack/swift-storage-0" Dec 06 08:28:58 crc kubenswrapper[4763]: I1206 08:28:58.520534 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 06 08:28:59 crc kubenswrapper[4763]: W1206 08:28:59.106829 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf22632a_c5cb_4636_abfe_48f60e1df901.slice/crio-cb6771d4e72436a3e7fa9ce5d4ff90a89b717df9b467a385b85912e30385e4eb WatchSource:0}: Error finding container cb6771d4e72436a3e7fa9ce5d4ff90a89b717df9b467a385b85912e30385e4eb: Status 404 returned error can't find the container with id cb6771d4e72436a3e7fa9ce5d4ff90a89b717df9b467a385b85912e30385e4eb Dec 06 08:28:59 crc kubenswrapper[4763]: I1206 08:28:59.109215 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 06 08:28:59 crc kubenswrapper[4763]: I1206 08:28:59.717024 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"cb6771d4e72436a3e7fa9ce5d4ff90a89b717df9b467a385b85912e30385e4eb"} Dec 06 08:28:59 crc kubenswrapper[4763]: I1206 08:28:59.936562 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:28:59 crc kubenswrapper[4763]: I1206 08:28:59.936862 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="prometheus" containerID="cri-o://72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc" gracePeriod=600 Dec 06 08:28:59 crc kubenswrapper[4763]: I1206 08:28:59.936932 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="config-reloader" containerID="cri-o://238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d" gracePeriod=600 Dec 06 08:28:59 crc kubenswrapper[4763]: I1206 08:28:59.936932 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="thanos-sidecar" containerID="cri-o://6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea" gracePeriod=600 Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.586774 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.729885 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"af77efb3d4ca611efe3c7bb604f85dd03a4cb076299649b9d806fcd87b46ea5f"} Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.729940 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"4f8cc82e1190a75a322cfa017d0d01466ca7152f1f0e58ace8c02fc83a50a697"} Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.729950 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"4ccc68c59faadf474ad98d05f465d62a4ebc3da701e994d73fcddf986aeeca39"} Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736112 4763 generic.go:334] "Generic (PLEG): container finished" podID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerID="6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea" exitCode=0 Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736137 4763 generic.go:334] "Generic (PLEG): container finished" podID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerID="238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d" exitCode=0 Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736144 4763 generic.go:334] "Generic (PLEG): container finished" podID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerID="72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc" exitCode=0 Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736180 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d80731d8-91ee-438d-a5c8-da9d36005e6f","Type":"ContainerDied","Data":"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea"} Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736249 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736267 4763 scope.go:117] "RemoveContainer" containerID="6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736254 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d80731d8-91ee-438d-a5c8-da9d36005e6f","Type":"ContainerDied","Data":"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d"} Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736409 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d80731d8-91ee-438d-a5c8-da9d36005e6f","Type":"ContainerDied","Data":"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc"} Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736428 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d80731d8-91ee-438d-a5c8-da9d36005e6f","Type":"ContainerDied","Data":"de10312443d2f8f1ddc81a6fcf180d601768b87904925a1bfb812370569908d4"} Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736494 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-config\") pod \"d80731d8-91ee-438d-a5c8-da9d36005e6f\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736538 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-thanos-prometheus-http-client-file\") pod \"d80731d8-91ee-438d-a5c8-da9d36005e6f\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736593 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d80731d8-91ee-438d-a5c8-da9d36005e6f-config-out\") pod \"d80731d8-91ee-438d-a5c8-da9d36005e6f\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736618 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-tls-assets\") pod \"d80731d8-91ee-438d-a5c8-da9d36005e6f\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736692 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wgcp\" (UniqueName: \"kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-kube-api-access-6wgcp\") pod \"d80731d8-91ee-438d-a5c8-da9d36005e6f\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736724 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-web-config\") pod \"d80731d8-91ee-438d-a5c8-da9d36005e6f\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736779 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: 
\"kubernetes.io/configmap/d80731d8-91ee-438d-a5c8-da9d36005e6f-prometheus-metric-storage-rulefiles-0\") pod \"d80731d8-91ee-438d-a5c8-da9d36005e6f\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.736870 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"d80731d8-91ee-438d-a5c8-da9d36005e6f\" (UID: \"d80731d8-91ee-438d-a5c8-da9d36005e6f\") " Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.738376 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d80731d8-91ee-438d-a5c8-da9d36005e6f-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "d80731d8-91ee-438d-a5c8-da9d36005e6f" (UID: "d80731d8-91ee-438d-a5c8-da9d36005e6f"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.745853 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "d80731d8-91ee-438d-a5c8-da9d36005e6f" (UID: "d80731d8-91ee-438d-a5c8-da9d36005e6f"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.746097 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-config" (OuterVolumeSpecName: "config") pod "d80731d8-91ee-438d-a5c8-da9d36005e6f" (UID: "d80731d8-91ee-438d-a5c8-da9d36005e6f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.746352 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "d80731d8-91ee-438d-a5c8-da9d36005e6f" (UID: "d80731d8-91ee-438d-a5c8-da9d36005e6f"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.751171 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-kube-api-access-6wgcp" (OuterVolumeSpecName: "kube-api-access-6wgcp") pod "d80731d8-91ee-438d-a5c8-da9d36005e6f" (UID: "d80731d8-91ee-438d-a5c8-da9d36005e6f"). InnerVolumeSpecName "kube-api-access-6wgcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.752079 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d80731d8-91ee-438d-a5c8-da9d36005e6f-config-out" (OuterVolumeSpecName: "config-out") pod "d80731d8-91ee-438d-a5c8-da9d36005e6f" (UID: "d80731d8-91ee-438d-a5c8-da9d36005e6f"). InnerVolumeSpecName "config-out". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.770667 4763 scope.go:117] "RemoveContainer" containerID="238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.776100 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "d80731d8-91ee-438d-a5c8-da9d36005e6f" (UID: "d80731d8-91ee-438d-a5c8-da9d36005e6f"). InnerVolumeSpecName "pvc-4183127b-ba41-4321-a678-4acbb0114b73". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.794500 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-web-config" (OuterVolumeSpecName: "web-config") pod "d80731d8-91ee-438d-a5c8-da9d36005e6f" (UID: "d80731d8-91ee-438d-a5c8-da9d36005e6f"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.804680 4763 scope.go:117] "RemoveContainer" containerID="72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.839310 4763 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d80731d8-91ee-438d-a5c8-da9d36005e6f-config-out\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.839348 4763 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.839363 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wgcp\" (UniqueName: \"kubernetes.io/projected/d80731d8-91ee-438d-a5c8-da9d36005e6f-kube-api-access-6wgcp\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.839375 4763 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-web-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.839384 4763 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d80731d8-91ee-438d-a5c8-da9d36005e6f-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.839411 4763 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") on node \"crc\" " Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.839424 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.839436 4763 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d80731d8-91ee-438d-a5c8-da9d36005e6f-thanos-prometheus-http-client-file\") on node 
\"crc\" DevicePath \"\"" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.866711 4763 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.866877 4763 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-4183127b-ba41-4321-a678-4acbb0114b73" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73") on node "crc" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.894079 4763 scope.go:117] "RemoveContainer" containerID="344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.914152 4763 scope.go:117] "RemoveContainer" containerID="6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea" Dec 06 08:29:00 crc kubenswrapper[4763]: E1206 08:29:00.914558 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea\": container with ID starting with 6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea not found: ID does not exist" containerID="6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.914595 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea"} err="failed to get container status \"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea\": rpc error: code = NotFound desc = could not find container \"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea\": container with ID starting with 6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.914622 4763 scope.go:117] "RemoveContainer" containerID="238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d" Dec 06 08:29:00 crc kubenswrapper[4763]: E1206 08:29:00.914890 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d\": container with ID starting with 238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d not found: ID does not exist" containerID="238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.915181 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d"} err="failed to get container status \"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d\": rpc error: code = NotFound desc = could not find container \"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d\": container with ID starting with 238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.915253 4763 scope.go:117] "RemoveContainer" containerID="72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc" Dec 06 08:29:00 crc kubenswrapper[4763]: E1206 08:29:00.915485 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc\": container with ID starting with 72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc not found: ID does not exist" containerID="72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.915511 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc"} err="failed to get container status \"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc\": rpc error: code = NotFound desc = could not find container \"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc\": container with ID starting with 72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.915527 4763 scope.go:117] "RemoveContainer" containerID="344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec" Dec 06 08:29:00 crc kubenswrapper[4763]: E1206 08:29:00.915769 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec\": container with ID starting with 344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec not found: ID does not exist" containerID="344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.915795 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec"} err="failed to get container status \"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec\": rpc error: code = NotFound desc = could not find container \"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec\": container with ID starting with 344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.915811 4763 scope.go:117] "RemoveContainer" containerID="6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.916295 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea"} err="failed to get container status \"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea\": rpc error: code = NotFound desc = could not find container \"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea\": container with ID starting with 6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.916319 4763 scope.go:117] "RemoveContainer" containerID="238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.916749 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d"} err="failed to get container status \"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d\": rpc error: code = NotFound desc = could not find container \"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d\": container with ID starting with 
238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.916772 4763 scope.go:117] "RemoveContainer" containerID="72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.917198 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc"} err="failed to get container status \"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc\": rpc error: code = NotFound desc = could not find container \"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc\": container with ID starting with 72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.917224 4763 scope.go:117] "RemoveContainer" containerID="344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.917444 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec"} err="failed to get container status \"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec\": rpc error: code = NotFound desc = could not find container \"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec\": container with ID starting with 344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.917461 4763 scope.go:117] "RemoveContainer" containerID="6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.917699 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea"} err="failed to get container status \"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea\": rpc error: code = NotFound desc = could not find container \"6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea\": container with ID starting with 6efe2bc28ec5b58e74360187067e696c0a64c8a60c88a657f91ba372d837d3ea not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.917722 4763 scope.go:117] "RemoveContainer" containerID="238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.917876 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d"} err="failed to get container status \"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d\": rpc error: code = NotFound desc = could not find container \"238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d\": container with ID starting with 238e14ba0325ac4f4540e1a9fc5250e432c88e99ac4f2c5a0df3f7876ef30d9d not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.917910 4763 scope.go:117] "RemoveContainer" containerID="72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.918216 4763 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc"} err="failed to get container status \"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc\": rpc error: code = NotFound desc = could not find container \"72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc\": container with ID starting with 72bf1bf62b96f4f68fb4ab424785046f8e13b73a48c197080859d36877280fcc not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.918238 4763 scope.go:117] "RemoveContainer" containerID="344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.918493 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec"} err="failed to get container status \"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec\": rpc error: code = NotFound desc = could not find container \"344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec\": container with ID starting with 344baecf339394788e78e60635f337b268cc7982d436063f8d27d67a4bd94cec not found: ID does not exist" Dec 06 08:29:00 crc kubenswrapper[4763]: I1206 08:29:00.962608 4763 reconciler_common.go:293] "Volume detached for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.070447 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.077611 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104162 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:29:01 crc kubenswrapper[4763]: E1206 08:29:01.104491 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb37a078-cd5e-4ca9-aac8-068f790e3a5a" containerName="mariadb-account-create-update" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104508 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb37a078-cd5e-4ca9-aac8-068f790e3a5a" containerName="mariadb-account-create-update" Dec 06 08:29:01 crc kubenswrapper[4763]: E1206 08:29:01.104527 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1447b4d5-1e23-43a9-9877-7e5ed71f3c72" containerName="mariadb-account-create-update" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104534 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="1447b4d5-1e23-43a9-9877-7e5ed71f3c72" containerName="mariadb-account-create-update" Dec 06 08:29:01 crc kubenswrapper[4763]: E1206 08:29:01.104549 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92a312a1-7032-4fa4-b14a-0874d22ac4ee" containerName="mariadb-database-create" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104555 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="92a312a1-7032-4fa4-b14a-0874d22ac4ee" containerName="mariadb-database-create" Dec 06 08:29:01 crc kubenswrapper[4763]: E1206 08:29:01.104562 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2355f72-6850-4c5e-ae74-d0525a8bd9e3" containerName="mariadb-account-create-update" Dec 06 08:29:01 crc kubenswrapper[4763]: 
I1206 08:29:01.104570 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2355f72-6850-4c5e-ae74-d0525a8bd9e3" containerName="mariadb-account-create-update" Dec 06 08:29:01 crc kubenswrapper[4763]: E1206 08:29:01.104590 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="init-config-reloader" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104598 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="init-config-reloader" Dec 06 08:29:01 crc kubenswrapper[4763]: E1206 08:29:01.104607 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="prometheus" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104613 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="prometheus" Dec 06 08:29:01 crc kubenswrapper[4763]: E1206 08:29:01.104618 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="thanos-sidecar" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104624 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="thanos-sidecar" Dec 06 08:29:01 crc kubenswrapper[4763]: E1206 08:29:01.104634 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="config-reloader" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104639 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="config-reloader" Dec 06 08:29:01 crc kubenswrapper[4763]: E1206 08:29:01.104653 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35fd0671-e6bc-429c-a2e5-2f7757c7cda4" containerName="mariadb-database-create" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104659 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="35fd0671-e6bc-429c-a2e5-2f7757c7cda4" containerName="mariadb-database-create" Dec 06 08:29:01 crc kubenswrapper[4763]: E1206 08:29:01.104672 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7728e4ef-8ef4-414c-9cd9-274b386d59bb" containerName="mariadb-database-create" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104679 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7728e4ef-8ef4-414c-9cd9-274b386d59bb" containerName="mariadb-database-create" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104843 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7728e4ef-8ef4-414c-9cd9-274b386d59bb" containerName="mariadb-database-create" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104861 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="prometheus" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104871 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="config-reloader" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104885 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="92a312a1-7032-4fa4-b14a-0874d22ac4ee" containerName="mariadb-database-create" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104935 4763 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="35fd0671-e6bc-429c-a2e5-2f7757c7cda4" containerName="mariadb-database-create" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104950 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb37a078-cd5e-4ca9-aac8-068f790e3a5a" containerName="mariadb-account-create-update" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104958 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" containerName="thanos-sidecar" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104967 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2355f72-6850-4c5e-ae74-d0525a8bd9e3" containerName="mariadb-account-create-update" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.104977 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="1447b4d5-1e23-43a9-9877-7e5ed71f3c72" containerName="mariadb-account-create-update" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.106393 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.108958 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.110237 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.110287 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.111293 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.111873 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.113722 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-b6l5c" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.116218 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.123576 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.268282 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.268353 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.268397 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcsmn\" (UniqueName: \"kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-kube-api-access-tcsmn\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.268434 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.268477 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.268617 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.268746 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.268878 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.268973 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.269033 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.269077 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.370853 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.370982 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcsmn\" (UniqueName: \"kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-kube-api-access-tcsmn\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.371012 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.371044 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.371081 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.371106 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.371138 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.371167 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: 
\"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.371188 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.371212 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.371245 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.372180 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.373580 4763 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.373616 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b015cf8d820501a423550d1415408204b77e53ec4d768da3cd0e5c2a5ce9ba08/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.376688 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.376753 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.376795 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.377030 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.378679 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.379235 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.380099 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.380298 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.394141 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcsmn\" (UniqueName: \"kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-kube-api-access-tcsmn\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.414517 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.428216 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.740052 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d80731d8-91ee-438d-a5c8-da9d36005e6f" path="/var/lib/kubelet/pods/d80731d8-91ee-438d-a5c8-da9d36005e6f/volumes" Dec 06 08:29:01 crc kubenswrapper[4763]: I1206 08:29:01.748744 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"a9efde646d05511af7bac32a96f8a9335b1068f5878cd560782397aeb589d7fb"} Dec 06 08:29:02 crc kubenswrapper[4763]: W1206 08:29:02.108063 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2a10389_6ee5_4381_b89b_b2ec5b3ab985.slice/crio-db7b747bbebe15ab6bf20c821e5dfdfeda9865d10cc435d9e7de52c8984ebdd5 WatchSource:0}: Error finding container db7b747bbebe15ab6bf20c821e5dfdfeda9865d10cc435d9e7de52c8984ebdd5: Status 404 returned error can't find the container with id db7b747bbebe15ab6bf20c821e5dfdfeda9865d10cc435d9e7de52c8984ebdd5 Dec 06 08:29:02 crc kubenswrapper[4763]: I1206 08:29:02.108652 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:29:02 crc kubenswrapper[4763]: I1206 08:29:02.772119 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e2a10389-6ee5-4381-b89b-b2ec5b3ab985","Type":"ContainerStarted","Data":"db7b747bbebe15ab6bf20c821e5dfdfeda9865d10cc435d9e7de52c8984ebdd5"} Dec 06 08:29:02 crc kubenswrapper[4763]: I1206 08:29:02.778885 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"9a3391b897b4aa0f394df244f5dc6ece46c7150290194ed3e3e33e3930a57a55"} Dec 06 08:29:02 crc kubenswrapper[4763]: I1206 08:29:02.778933 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"97aa1693b60aad8a7c2d1efc1d40ddb118dfa8ee494e3644c3cb35e5ff20665d"} Dec 06 08:29:02 crc kubenswrapper[4763]: I1206 08:29:02.778942 4763 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"b9e986d1ac48a80c9c97576320bc1159cf2ae136e41ee5acccdedccc3298bff9"} Dec 06 08:29:02 crc kubenswrapper[4763]: I1206 08:29:02.778951 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"5643196fece0fdcd5356cfc1af9197d005456342da940ac6bdad70008aa0fb04"} Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.234789 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-mvnv6" podUID="abd10dfb-5dd9-4271-94aa-60b8fed4ba2b" containerName="ovn-controller" probeResult="failure" output=< Dec 06 08:29:03 crc kubenswrapper[4763]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 06 08:29:03 crc kubenswrapper[4763]: > Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.270967 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-hqktq" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.489674 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-mvnv6-config-2kdlv"] Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.491275 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.495610 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.500371 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mvnv6-config-2kdlv"] Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.608636 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-scripts\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.608761 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.608804 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-additional-scripts\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.608856 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run-ovn\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.609025 4763 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-log-ovn\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.609257 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thn4r\" (UniqueName: \"kubernetes.io/projected/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-kube-api-access-thn4r\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.710257 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-log-ovn\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.710379 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thn4r\" (UniqueName: \"kubernetes.io/projected/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-kube-api-access-thn4r\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.710405 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-scripts\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.710446 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.710466 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-additional-scripts\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.710501 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run-ovn\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.710674 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-log-ovn\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 
crc kubenswrapper[4763]: I1206 08:29:03.710737 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run-ovn\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.710766 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.711394 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-additional-scripts\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.712284 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-scripts\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.734886 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thn4r\" (UniqueName: \"kubernetes.io/projected/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-kube-api-access-thn4r\") pod \"ovn-controller-mvnv6-config-2kdlv\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.793522 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"b08b3a6d22444fbf2657eb24e4629515e9d9348b17863217eb2c217514bbec3a"} Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.794524 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"9ccce360ed7c404e27e031c988b12e4b10287846e214b9072343bdff6021d3af"} Dec 06 08:29:03 crc kubenswrapper[4763]: I1206 08:29:03.809087 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:04 crc kubenswrapper[4763]: I1206 08:29:04.341471 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mvnv6-config-2kdlv"] Dec 06 08:29:04 crc kubenswrapper[4763]: I1206 08:29:04.804078 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e2a10389-6ee5-4381-b89b-b2ec5b3ab985","Type":"ContainerStarted","Data":"a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0"} Dec 06 08:29:04 crc kubenswrapper[4763]: I1206 08:29:04.808597 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mvnv6-config-2kdlv" event={"ID":"ecf3cb8d-5f40-4e4c-a84d-810d69720f34","Type":"ContainerStarted","Data":"a6db8400e6dd759bb9971224915245b0e626716693ad7ddbbe7e234506f9c993"} Dec 06 08:29:04 crc kubenswrapper[4763]: I1206 08:29:04.825518 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"f4e25a5a014d005dfb0498120972ca2af94c8bf1605cca0d88142ec6f3457d99"} Dec 06 08:29:04 crc kubenswrapper[4763]: I1206 08:29:04.825801 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"5a7f3766e055d6f90a89e64d8f64c24ecc2a806c05d6e674ed8cebcfde605e4b"} Dec 06 08:29:05 crc kubenswrapper[4763]: I1206 08:29:05.838875 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"bb27db73a4e1296fc97143634d9adc1fa9cfa546a2776c6181af71f5ff2d880b"} Dec 06 08:29:05 crc kubenswrapper[4763]: I1206 08:29:05.839181 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"62c30b520a1517ec55ed233c0ada244ed7bc7c8c487edb8c298898efd2c73c86"} Dec 06 08:29:05 crc kubenswrapper[4763]: I1206 08:29:05.839194 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"df22632a-c5cb-4636-abfe-48f60e1df901","Type":"ContainerStarted","Data":"5cbe27e57cbb9f826f3360b0ddcc861ef7c3bde1597df5c11e919782a8d6d518"} Dec 06 08:29:05 crc kubenswrapper[4763]: I1206 08:29:05.840891 4763 generic.go:334] "Generic (PLEG): container finished" podID="ecf3cb8d-5f40-4e4c-a84d-810d69720f34" containerID="8bf783d1994ffac8d83cc1c4f7d3f9c286a83009090459e4f3b05e1b0c096f2f" exitCode=0 Dec 06 08:29:05 crc kubenswrapper[4763]: I1206 08:29:05.841244 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mvnv6-config-2kdlv" event={"ID":"ecf3cb8d-5f40-4e4c-a84d-810d69720f34","Type":"ContainerDied","Data":"8bf783d1994ffac8d83cc1c4f7d3f9c286a83009090459e4f3b05e1b0c096f2f"} Dec 06 08:29:05 crc kubenswrapper[4763]: I1206 08:29:05.876614 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.831396343 podStartE2EDuration="40.876595758s" podCreationTimestamp="2025-12-06 08:28:25 +0000 UTC" firstStartedPulling="2025-12-06 08:28:59.109360679 +0000 UTC m=+1021.685065717" lastFinishedPulling="2025-12-06 08:29:03.154560104 +0000 UTC m=+1025.730265132" observedRunningTime="2025-12-06 08:29:05.871600683 +0000 UTC m=+1028.447305721" watchObservedRunningTime="2025-12-06 08:29:05.876595758 +0000 
UTC m=+1028.452300786" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.134389 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5956b77d5c-kbjvz"] Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.136134 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.139022 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.143443 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5956b77d5c-kbjvz"] Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.257136 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-nb\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.257373 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dwnx\" (UniqueName: \"kubernetes.io/projected/72235379-a68d-41c8-9e7d-2880837b22d7-kube-api-access-2dwnx\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.257424 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-sb\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.257617 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-svc\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.257773 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-config\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.257847 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-swift-storage-0\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.359706 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-svc\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc 
kubenswrapper[4763]: I1206 08:29:06.359794 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-config\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.359825 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-swift-storage-0\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.359875 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-nb\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.359952 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dwnx\" (UniqueName: \"kubernetes.io/projected/72235379-a68d-41c8-9e7d-2880837b22d7-kube-api-access-2dwnx\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.359974 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-sb\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.361043 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-config\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.361095 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-sb\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.361118 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-svc\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.361126 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-swift-storage-0\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.361739 4763 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-nb\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.379061 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dwnx\" (UniqueName: \"kubernetes.io/projected/72235379-a68d-41c8-9e7d-2880837b22d7-kube-api-access-2dwnx\") pod \"dnsmasq-dns-5956b77d5c-kbjvz\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:06 crc kubenswrapper[4763]: I1206 08:29:06.705203 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:07 crc kubenswrapper[4763]: W1206 08:29:07.240252 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72235379_a68d_41c8_9e7d_2880837b22d7.slice/crio-c7fd6ef7275ffc87dcfadda5c74773cb38860754436fb01b68fe77a9a255f2f5 WatchSource:0}: Error finding container c7fd6ef7275ffc87dcfadda5c74773cb38860754436fb01b68fe77a9a255f2f5: Status 404 returned error can't find the container with id c7fd6ef7275ffc87dcfadda5c74773cb38860754436fb01b68fe77a9a255f2f5 Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.241869 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5956b77d5c-kbjvz"] Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.259330 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.420173 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thn4r\" (UniqueName: \"kubernetes.io/projected/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-kube-api-access-thn4r\") pod \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.420393 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run-ovn\") pod \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.420474 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "ecf3cb8d-5f40-4e4c-a84d-810d69720f34" (UID: "ecf3cb8d-5f40-4e4c-a84d-810d69720f34"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.420527 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-log-ovn\") pod \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.420594 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "ecf3cb8d-5f40-4e4c-a84d-810d69720f34" (UID: "ecf3cb8d-5f40-4e4c-a84d-810d69720f34"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.420643 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-additional-scripts\") pod \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.421483 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "ecf3cb8d-5f40-4e4c-a84d-810d69720f34" (UID: "ecf3cb8d-5f40-4e4c-a84d-810d69720f34"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.422674 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-scripts\") pod \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.422722 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run\") pod \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\" (UID: \"ecf3cb8d-5f40-4e4c-a84d-810d69720f34\") " Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.423119 4763 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.423144 4763 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.423157 4763 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.423189 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run" (OuterVolumeSpecName: "var-run") pod "ecf3cb8d-5f40-4e4c-a84d-810d69720f34" (UID: "ecf3cb8d-5f40-4e4c-a84d-810d69720f34"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.423988 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-scripts" (OuterVolumeSpecName: "scripts") pod "ecf3cb8d-5f40-4e4c-a84d-810d69720f34" (UID: "ecf3cb8d-5f40-4e4c-a84d-810d69720f34"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.426109 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-kube-api-access-thn4r" (OuterVolumeSpecName: "kube-api-access-thn4r") pod "ecf3cb8d-5f40-4e4c-a84d-810d69720f34" (UID: "ecf3cb8d-5f40-4e4c-a84d-810d69720f34"). InnerVolumeSpecName "kube-api-access-thn4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.524460 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.524492 4763 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-var-run\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.524504 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thn4r\" (UniqueName: \"kubernetes.io/projected/ecf3cb8d-5f40-4e4c-a84d-810d69720f34-kube-api-access-thn4r\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.873417 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mvnv6-config-2kdlv" event={"ID":"ecf3cb8d-5f40-4e4c-a84d-810d69720f34","Type":"ContainerDied","Data":"a6db8400e6dd759bb9971224915245b0e626716693ad7ddbbe7e234506f9c993"} Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.873458 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6db8400e6dd759bb9971224915245b0e626716693ad7ddbbe7e234506f9c993" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.873516 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-mvnv6-config-2kdlv" Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.877317 4763 generic.go:334] "Generic (PLEG): container finished" podID="72235379-a68d-41c8-9e7d-2880837b22d7" containerID="da9f2648f4cda724f3e9b0b752366e4dff845834f47bf5faa5317fc7e9a5c674" exitCode=0 Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.877376 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" event={"ID":"72235379-a68d-41c8-9e7d-2880837b22d7","Type":"ContainerDied","Data":"da9f2648f4cda724f3e9b0b752366e4dff845834f47bf5faa5317fc7e9a5c674"} Dec 06 08:29:07 crc kubenswrapper[4763]: I1206 08:29:07.877401 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" event={"ID":"72235379-a68d-41c8-9e7d-2880837b22d7","Type":"ContainerStarted","Data":"c7fd6ef7275ffc87dcfadda5c74773cb38860754436fb01b68fe77a9a255f2f5"} Dec 06 08:29:08 crc kubenswrapper[4763]: I1206 08:29:08.253008 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-mvnv6" Dec 06 08:29:08 crc kubenswrapper[4763]: I1206 08:29:08.386234 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-mvnv6-config-2kdlv"] Dec 06 08:29:08 crc kubenswrapper[4763]: I1206 08:29:08.398464 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-mvnv6-config-2kdlv"] Dec 06 08:29:08 crc kubenswrapper[4763]: I1206 08:29:08.585431 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-notifications-server-0" podUID="e18a4dfa-5953-422a-be11-7ae83ab5ec09" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.105:5671: connect: connection refused" Dec 06 08:29:08 crc kubenswrapper[4763]: I1206 08:29:08.875290 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="95ce87d2-e5c0-41f4-948a-e78e26077c91" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.106:5671: connect: connection refused" Dec 06 08:29:08 crc kubenswrapper[4763]: I1206 08:29:08.887865 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" event={"ID":"72235379-a68d-41c8-9e7d-2880837b22d7","Type":"ContainerStarted","Data":"bdd8adec75d19e417f66dadf92294dbbabd65123b7cfc021dab86b9a8e30a3f2"} Dec 06 08:29:08 crc kubenswrapper[4763]: I1206 08:29:08.888072 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:08 crc kubenswrapper[4763]: I1206 08:29:08.919500 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" podStartSLOduration=2.919480396 podStartE2EDuration="2.919480396s" podCreationTimestamp="2025-12-06 08:29:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:29:08.913344241 +0000 UTC m=+1031.489049289" watchObservedRunningTime="2025-12-06 08:29:08.919480396 +0000 UTC m=+1031.495185434" Dec 06 08:29:09 crc kubenswrapper[4763]: I1206 08:29:09.241093 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: connect: connection refused" Dec 06 08:29:09 crc kubenswrapper[4763]: I1206 08:29:09.729633 
4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecf3cb8d-5f40-4e4c-a84d-810d69720f34" path="/var/lib/kubelet/pods/ecf3cb8d-5f40-4e4c-a84d-810d69720f34/volumes" Dec 06 08:29:11 crc kubenswrapper[4763]: I1206 08:29:11.907482 4763 generic.go:334] "Generic (PLEG): container finished" podID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerID="a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0" exitCode=0 Dec 06 08:29:11 crc kubenswrapper[4763]: I1206 08:29:11.907571 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e2a10389-6ee5-4381-b89b-b2ec5b3ab985","Type":"ContainerDied","Data":"a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0"} Dec 06 08:29:12 crc kubenswrapper[4763]: I1206 08:29:12.926675 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e2a10389-6ee5-4381-b89b-b2ec5b3ab985","Type":"ContainerStarted","Data":"026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468"} Dec 06 08:29:15 crc kubenswrapper[4763]: I1206 08:29:15.958494 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e2a10389-6ee5-4381-b89b-b2ec5b3ab985","Type":"ContainerStarted","Data":"6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd"} Dec 06 08:29:15 crc kubenswrapper[4763]: I1206 08:29:15.959506 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e2a10389-6ee5-4381-b89b-b2ec5b3ab985","Type":"ContainerStarted","Data":"85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab"} Dec 06 08:29:16 crc kubenswrapper[4763]: I1206 08:29:16.005564 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=15.005497364 podStartE2EDuration="15.005497364s" podCreationTimestamp="2025-12-06 08:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:29:15.991322842 +0000 UTC m=+1038.567027970" watchObservedRunningTime="2025-12-06 08:29:16.005497364 +0000 UTC m=+1038.581202432" Dec 06 08:29:16 crc kubenswrapper[4763]: I1206 08:29:16.429621 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:16 crc kubenswrapper[4763]: I1206 08:29:16.429692 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:16 crc kubenswrapper[4763]: I1206 08:29:16.435707 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:16 crc kubenswrapper[4763]: I1206 08:29:16.707122 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:29:16 crc kubenswrapper[4763]: I1206 08:29:16.802730 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b64889c67-6xmsx"] Dec 06 08:29:16 crc kubenswrapper[4763]: I1206 08:29:16.803048 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" podUID="ecf53c07-1690-45db-91fe-1afe6fb21de6" containerName="dnsmasq-dns" containerID="cri-o://58bacf47dedad5f6678f2b96accbd27b2e47f821c90944b6fe3b00c8f8effb18" gracePeriod=10 Dec 06 08:29:16 crc kubenswrapper[4763]: 
I1206 08:29:16.979006 4763 generic.go:334] "Generic (PLEG): container finished" podID="ecf53c07-1690-45db-91fe-1afe6fb21de6" containerID="58bacf47dedad5f6678f2b96accbd27b2e47f821c90944b6fe3b00c8f8effb18" exitCode=0 Dec 06 08:29:16 crc kubenswrapper[4763]: I1206 08:29:16.979163 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" event={"ID":"ecf53c07-1690-45db-91fe-1afe6fb21de6","Type":"ContainerDied","Data":"58bacf47dedad5f6678f2b96accbd27b2e47f821c90944b6fe3b00c8f8effb18"} Dec 06 08:29:16 crc kubenswrapper[4763]: I1206 08:29:16.983826 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.314525 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.393412 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-dns-svc\") pod \"ecf53c07-1690-45db-91fe-1afe6fb21de6\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.393555 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-sb\") pod \"ecf53c07-1690-45db-91fe-1afe6fb21de6\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.393639 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-nb\") pod \"ecf53c07-1690-45db-91fe-1afe6fb21de6\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.393680 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc7fn\" (UniqueName: \"kubernetes.io/projected/ecf53c07-1690-45db-91fe-1afe6fb21de6-kube-api-access-lc7fn\") pod \"ecf53c07-1690-45db-91fe-1afe6fb21de6\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.393714 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-config\") pod \"ecf53c07-1690-45db-91fe-1afe6fb21de6\" (UID: \"ecf53c07-1690-45db-91fe-1afe6fb21de6\") " Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.399922 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecf53c07-1690-45db-91fe-1afe6fb21de6-kube-api-access-lc7fn" (OuterVolumeSpecName: "kube-api-access-lc7fn") pod "ecf53c07-1690-45db-91fe-1afe6fb21de6" (UID: "ecf53c07-1690-45db-91fe-1afe6fb21de6"). InnerVolumeSpecName "kube-api-access-lc7fn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.437655 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ecf53c07-1690-45db-91fe-1afe6fb21de6" (UID: "ecf53c07-1690-45db-91fe-1afe6fb21de6"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.443666 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ecf53c07-1690-45db-91fe-1afe6fb21de6" (UID: "ecf53c07-1690-45db-91fe-1afe6fb21de6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.445237 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ecf53c07-1690-45db-91fe-1afe6fb21de6" (UID: "ecf53c07-1690-45db-91fe-1afe6fb21de6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.445395 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-config" (OuterVolumeSpecName: "config") pod "ecf53c07-1690-45db-91fe-1afe6fb21de6" (UID: "ecf53c07-1690-45db-91fe-1afe6fb21de6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.496830 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.496866 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.496878 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.496887 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc7fn\" (UniqueName: \"kubernetes.io/projected/ecf53c07-1690-45db-91fe-1afe6fb21de6-kube-api-access-lc7fn\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.496914 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecf53c07-1690-45db-91fe-1afe6fb21de6-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.989127 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.989116 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b64889c67-6xmsx" event={"ID":"ecf53c07-1690-45db-91fe-1afe6fb21de6","Type":"ContainerDied","Data":"9eb592ddb2ffa1430ae443103637c786c1161933a954383172b5607e5aee298b"} Dec 06 08:29:17 crc kubenswrapper[4763]: I1206 08:29:17.989537 4763 scope.go:117] "RemoveContainer" containerID="58bacf47dedad5f6678f2b96accbd27b2e47f821c90944b6fe3b00c8f8effb18" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.015383 4763 scope.go:117] "RemoveContainer" containerID="b9222022e597a8416cc532aa4e0d9e0c1453dd2985212f42969e1267ae4dbcd8" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.016658 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b64889c67-6xmsx"] Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.023456 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b64889c67-6xmsx"] Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.586217 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-notifications-server-0" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.785683 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-gb2pf"] Dec 06 08:29:18 crc kubenswrapper[4763]: E1206 08:29:18.786271 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecf3cb8d-5f40-4e4c-a84d-810d69720f34" containerName="ovn-config" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.786288 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecf3cb8d-5f40-4e4c-a84d-810d69720f34" containerName="ovn-config" Dec 06 08:29:18 crc kubenswrapper[4763]: E1206 08:29:18.786301 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecf53c07-1690-45db-91fe-1afe6fb21de6" containerName="dnsmasq-dns" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.786308 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecf53c07-1690-45db-91fe-1afe6fb21de6" containerName="dnsmasq-dns" Dec 06 08:29:18 crc kubenswrapper[4763]: E1206 08:29:18.786318 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecf53c07-1690-45db-91fe-1afe6fb21de6" containerName="init" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.786323 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecf53c07-1690-45db-91fe-1afe6fb21de6" containerName="init" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.786485 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecf53c07-1690-45db-91fe-1afe6fb21de6" containerName="dnsmasq-dns" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.786504 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecf3cb8d-5f40-4e4c-a84d-810d69720f34" containerName="ovn-config" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.787057 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-gb2pf" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.800022 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-gb2pf"] Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.876354 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.889247 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-ebdf-account-create-update-ksnmx"] Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.890348 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ebdf-account-create-update-ksnmx" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.893297 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.899819 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-ebdf-account-create-update-ksnmx"] Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.928532 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdwmf\" (UniqueName: \"kubernetes.io/projected/05a15474-6053-4ab8-8c89-52d1289f9397-kube-api-access-pdwmf\") pod \"glance-db-create-gb2pf\" (UID: \"05a15474-6053-4ab8-8c89-52d1289f9397\") " pod="openstack/glance-db-create-gb2pf" Dec 06 08:29:18 crc kubenswrapper[4763]: I1206 08:29:18.928574 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05a15474-6053-4ab8-8c89-52d1289f9397-operator-scripts\") pod \"glance-db-create-gb2pf\" (UID: \"05a15474-6053-4ab8-8c89-52d1289f9397\") " pod="openstack/glance-db-create-gb2pf" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.029737 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kck6f\" (UniqueName: \"kubernetes.io/projected/e9455395-06d9-4bc9-9ea6-183dd115655c-kube-api-access-kck6f\") pod \"glance-ebdf-account-create-update-ksnmx\" (UID: \"e9455395-06d9-4bc9-9ea6-183dd115655c\") " pod="openstack/glance-ebdf-account-create-update-ksnmx" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.029846 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9455395-06d9-4bc9-9ea6-183dd115655c-operator-scripts\") pod \"glance-ebdf-account-create-update-ksnmx\" (UID: \"e9455395-06d9-4bc9-9ea6-183dd115655c\") " pod="openstack/glance-ebdf-account-create-update-ksnmx" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.029891 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdwmf\" (UniqueName: \"kubernetes.io/projected/05a15474-6053-4ab8-8c89-52d1289f9397-kube-api-access-pdwmf\") pod \"glance-db-create-gb2pf\" (UID: \"05a15474-6053-4ab8-8c89-52d1289f9397\") " pod="openstack/glance-db-create-gb2pf" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.029939 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05a15474-6053-4ab8-8c89-52d1289f9397-operator-scripts\") pod \"glance-db-create-gb2pf\" (UID: \"05a15474-6053-4ab8-8c89-52d1289f9397\") " 
pod="openstack/glance-db-create-gb2pf" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.031420 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05a15474-6053-4ab8-8c89-52d1289f9397-operator-scripts\") pod \"glance-db-create-gb2pf\" (UID: \"05a15474-6053-4ab8-8c89-52d1289f9397\") " pod="openstack/glance-db-create-gb2pf" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.051064 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdwmf\" (UniqueName: \"kubernetes.io/projected/05a15474-6053-4ab8-8c89-52d1289f9397-kube-api-access-pdwmf\") pod \"glance-db-create-gb2pf\" (UID: \"05a15474-6053-4ab8-8c89-52d1289f9397\") " pod="openstack/glance-db-create-gb2pf" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.121319 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gb2pf" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.131355 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kck6f\" (UniqueName: \"kubernetes.io/projected/e9455395-06d9-4bc9-9ea6-183dd115655c-kube-api-access-kck6f\") pod \"glance-ebdf-account-create-update-ksnmx\" (UID: \"e9455395-06d9-4bc9-9ea6-183dd115655c\") " pod="openstack/glance-ebdf-account-create-update-ksnmx" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.131447 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9455395-06d9-4bc9-9ea6-183dd115655c-operator-scripts\") pod \"glance-ebdf-account-create-update-ksnmx\" (UID: \"e9455395-06d9-4bc9-9ea6-183dd115655c\") " pod="openstack/glance-ebdf-account-create-update-ksnmx" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.132765 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9455395-06d9-4bc9-9ea6-183dd115655c-operator-scripts\") pod \"glance-ebdf-account-create-update-ksnmx\" (UID: \"e9455395-06d9-4bc9-9ea6-183dd115655c\") " pod="openstack/glance-ebdf-account-create-update-ksnmx" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.151663 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kck6f\" (UniqueName: \"kubernetes.io/projected/e9455395-06d9-4bc9-9ea6-183dd115655c-kube-api-access-kck6f\") pod \"glance-ebdf-account-create-update-ksnmx\" (UID: \"e9455395-06d9-4bc9-9ea6-183dd115655c\") " pod="openstack/glance-ebdf-account-create-update-ksnmx" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.211513 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-ebdf-account-create-update-ksnmx" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.244238 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:29:19 crc kubenswrapper[4763]: W1206 08:29:19.617027 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05a15474_6053_4ab8_8c89_52d1289f9397.slice/crio-db0916064e8c4e34e7b91ea27c5593c6ad7f7bb9040678098859c9e1707b8afc WatchSource:0}: Error finding container db0916064e8c4e34e7b91ea27c5593c6ad7f7bb9040678098859c9e1707b8afc: Status 404 returned error can't find the container with id db0916064e8c4e34e7b91ea27c5593c6ad7f7bb9040678098859c9e1707b8afc Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.617888 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-gb2pf"] Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.731102 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecf53c07-1690-45db-91fe-1afe6fb21de6" path="/var/lib/kubelet/pods/ecf53c07-1690-45db-91fe-1afe6fb21de6/volumes" Dec 06 08:29:19 crc kubenswrapper[4763]: I1206 08:29:19.807418 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-ebdf-account-create-update-ksnmx"] Dec 06 08:29:19 crc kubenswrapper[4763]: W1206 08:29:19.808495 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9455395_06d9_4bc9_9ea6_183dd115655c.slice/crio-0fa1a697b77edd1f58e575c00b968f231f8906c23021e3f5911c7551516e5ef7 WatchSource:0}: Error finding container 0fa1a697b77edd1f58e575c00b968f231f8906c23021e3f5911c7551516e5ef7: Status 404 returned error can't find the container with id 0fa1a697b77edd1f58e575c00b968f231f8906c23021e3f5911c7551516e5ef7 Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.004999 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ebdf-account-create-update-ksnmx" event={"ID":"e9455395-06d9-4bc9-9ea6-183dd115655c","Type":"ContainerStarted","Data":"26bedac173fd8f4092dde4f4ddb33446080392f51581663f24eb704a015743b6"} Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.005044 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ebdf-account-create-update-ksnmx" event={"ID":"e9455395-06d9-4bc9-9ea6-183dd115655c","Type":"ContainerStarted","Data":"0fa1a697b77edd1f58e575c00b968f231f8906c23021e3f5911c7551516e5ef7"} Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.006767 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gb2pf" event={"ID":"05a15474-6053-4ab8-8c89-52d1289f9397","Type":"ContainerStarted","Data":"36bc5390de3a8fbf12d47ec663649b1af12cad35173b670c6fe6e586bc2c536e"} Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.006801 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gb2pf" event={"ID":"05a15474-6053-4ab8-8c89-52d1289f9397","Type":"ContainerStarted","Data":"db0916064e8c4e34e7b91ea27c5593c6ad7f7bb9040678098859c9e1707b8afc"} Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.025346 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-ebdf-account-create-update-ksnmx" podStartSLOduration=2.025317443 podStartE2EDuration="2.025317443s" podCreationTimestamp="2025-12-06 08:29:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 
UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:29:20.018367376 +0000 UTC m=+1042.594072424" watchObservedRunningTime="2025-12-06 08:29:20.025317443 +0000 UTC m=+1042.601022501" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.033599 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-gb2pf" podStartSLOduration=2.033582587 podStartE2EDuration="2.033582587s" podCreationTimestamp="2025-12-06 08:29:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:29:20.032920979 +0000 UTC m=+1042.608626017" watchObservedRunningTime="2025-12-06 08:29:20.033582587 +0000 UTC m=+1042.609287645" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.525213 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-lzmz8"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.526566 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-lzmz8" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.535978 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-f887-account-create-update-2tnbk"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.537367 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f887-account-create-update-2tnbk" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.540527 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.549193 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-lzmz8"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.559956 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-f887-account-create-update-2tnbk"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.597825 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-sync-kr9kp"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.599392 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.604183 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-tgmcp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.604496 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-config-data" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.663965 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs4nh\" (UniqueName: \"kubernetes.io/projected/b061ca68-5423-4891-a883-f5ed470789e8-kube-api-access-cs4nh\") pod \"barbican-db-create-lzmz8\" (UID: \"b061ca68-5423-4891-a883-f5ed470789e8\") " pod="openstack/barbican-db-create-lzmz8" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.664037 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-kr9kp"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.664064 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b061ca68-5423-4891-a883-f5ed470789e8-operator-scripts\") pod \"barbican-db-create-lzmz8\" (UID: \"b061ca68-5423-4891-a883-f5ed470789e8\") " pod="openstack/barbican-db-create-lzmz8" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.664147 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-config-data\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.664180 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-combined-ca-bundle\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.664220 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-db-sync-config-data\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.664240 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjmbv\" (UniqueName: \"kubernetes.io/projected/d19920e9-3eff-4701-a81c-872cdf0f424c-kube-api-access-jjmbv\") pod \"barbican-f887-account-create-update-2tnbk\" (UID: \"d19920e9-3eff-4701-a81c-872cdf0f424c\") " pod="openstack/barbican-f887-account-create-update-2tnbk" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.664272 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d19920e9-3eff-4701-a81c-872cdf0f424c-operator-scripts\") pod \"barbican-f887-account-create-update-2tnbk\" (UID: \"d19920e9-3eff-4701-a81c-872cdf0f424c\") " pod="openstack/barbican-f887-account-create-update-2tnbk" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.664380 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rtl7\" (UniqueName: \"kubernetes.io/projected/53409f27-f50b-4048-b355-8bc4b6956cf5-kube-api-access-4rtl7\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.678282 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-szpbq"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.679496 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-szpbq" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.690143 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-3b7d-account-create-update-2ts6f"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.697463 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3b7d-account-create-update-2ts6f" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.702552 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.725650 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-szpbq"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.752750 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3b7d-account-create-update-2ts6f"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.765676 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqbb9\" (UniqueName: \"kubernetes.io/projected/49a57c04-2386-4579-996e-664b1b58349d-kube-api-access-fqbb9\") pod \"cinder-db-create-szpbq\" (UID: \"49a57c04-2386-4579-996e-664b1b58349d\") " pod="openstack/cinder-db-create-szpbq" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.765751 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs4nh\" (UniqueName: \"kubernetes.io/projected/b061ca68-5423-4891-a883-f5ed470789e8-kube-api-access-cs4nh\") pod \"barbican-db-create-lzmz8\" (UID: \"b061ca68-5423-4891-a883-f5ed470789e8\") " pod="openstack/barbican-db-create-lzmz8" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.765802 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b061ca68-5423-4891-a883-f5ed470789e8-operator-scripts\") pod \"barbican-db-create-lzmz8\" (UID: \"b061ca68-5423-4891-a883-f5ed470789e8\") " pod="openstack/barbican-db-create-lzmz8" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.765839 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8e347b3-3740-4fff-aca7-d6204175fda4-operator-scripts\") pod \"cinder-3b7d-account-create-update-2ts6f\" (UID: \"e8e347b3-3740-4fff-aca7-d6204175fda4\") " pod="openstack/cinder-3b7d-account-create-update-2ts6f" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.765937 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-config-data\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 
08:29:20.765961 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-combined-ca-bundle\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.765986 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pc2b\" (UniqueName: \"kubernetes.io/projected/e8e347b3-3740-4fff-aca7-d6204175fda4-kube-api-access-8pc2b\") pod \"cinder-3b7d-account-create-update-2ts6f\" (UID: \"e8e347b3-3740-4fff-aca7-d6204175fda4\") " pod="openstack/cinder-3b7d-account-create-update-2ts6f" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.766013 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-db-sync-config-data\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.766032 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjmbv\" (UniqueName: \"kubernetes.io/projected/d19920e9-3eff-4701-a81c-872cdf0f424c-kube-api-access-jjmbv\") pod \"barbican-f887-account-create-update-2tnbk\" (UID: \"d19920e9-3eff-4701-a81c-872cdf0f424c\") " pod="openstack/barbican-f887-account-create-update-2tnbk" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.766058 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d19920e9-3eff-4701-a81c-872cdf0f424c-operator-scripts\") pod \"barbican-f887-account-create-update-2tnbk\" (UID: \"d19920e9-3eff-4701-a81c-872cdf0f424c\") " pod="openstack/barbican-f887-account-create-update-2tnbk" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.766083 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49a57c04-2386-4579-996e-664b1b58349d-operator-scripts\") pod \"cinder-db-create-szpbq\" (UID: \"49a57c04-2386-4579-996e-664b1b58349d\") " pod="openstack/cinder-db-create-szpbq" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.766123 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rtl7\" (UniqueName: \"kubernetes.io/projected/53409f27-f50b-4048-b355-8bc4b6956cf5-kube-api-access-4rtl7\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.766990 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d19920e9-3eff-4701-a81c-872cdf0f424c-operator-scripts\") pod \"barbican-f887-account-create-update-2tnbk\" (UID: \"d19920e9-3eff-4701-a81c-872cdf0f424c\") " pod="openstack/barbican-f887-account-create-update-2tnbk" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.767203 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b061ca68-5423-4891-a883-f5ed470789e8-operator-scripts\") pod \"barbican-db-create-lzmz8\" (UID: \"b061ca68-5423-4891-a883-f5ed470789e8\") " 
pod="openstack/barbican-db-create-lzmz8" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.772044 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-config-data\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.772104 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-combined-ca-bundle\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.783132 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs4nh\" (UniqueName: \"kubernetes.io/projected/b061ca68-5423-4891-a883-f5ed470789e8-kube-api-access-cs4nh\") pod \"barbican-db-create-lzmz8\" (UID: \"b061ca68-5423-4891-a883-f5ed470789e8\") " pod="openstack/barbican-db-create-lzmz8" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.784474 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjmbv\" (UniqueName: \"kubernetes.io/projected/d19920e9-3eff-4701-a81c-872cdf0f424c-kube-api-access-jjmbv\") pod \"barbican-f887-account-create-update-2tnbk\" (UID: \"d19920e9-3eff-4701-a81c-872cdf0f424c\") " pod="openstack/barbican-f887-account-create-update-2tnbk" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.787858 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-db-sync-config-data\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.790299 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rtl7\" (UniqueName: \"kubernetes.io/projected/53409f27-f50b-4048-b355-8bc4b6956cf5-kube-api-access-4rtl7\") pod \"watcher-db-sync-kr9kp\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.835148 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-97llg"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.836206 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.839220 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gm8vr" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.839457 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.839737 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.840398 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.844980 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-lzmz8" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.859527 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f887-account-create-update-2tnbk" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.873935 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8e347b3-3740-4fff-aca7-d6204175fda4-operator-scripts\") pod \"cinder-3b7d-account-create-update-2ts6f\" (UID: \"e8e347b3-3740-4fff-aca7-d6204175fda4\") " pod="openstack/cinder-3b7d-account-create-update-2ts6f" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.874026 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-config-data\") pod \"keystone-db-sync-97llg\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.874053 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pc2b\" (UniqueName: \"kubernetes.io/projected/e8e347b3-3740-4fff-aca7-d6204175fda4-kube-api-access-8pc2b\") pod \"cinder-3b7d-account-create-update-2ts6f\" (UID: \"e8e347b3-3740-4fff-aca7-d6204175fda4\") " pod="openstack/cinder-3b7d-account-create-update-2ts6f" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.874095 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49a57c04-2386-4579-996e-664b1b58349d-operator-scripts\") pod \"cinder-db-create-szpbq\" (UID: \"49a57c04-2386-4579-996e-664b1b58349d\") " pod="openstack/cinder-db-create-szpbq" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.874122 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64tjt\" (UniqueName: \"kubernetes.io/projected/c133dec0-ffee-47f1-949f-72aeeed1163c-kube-api-access-64tjt\") pod \"keystone-db-sync-97llg\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.874154 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-combined-ca-bundle\") pod \"keystone-db-sync-97llg\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.874189 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqbb9\" (UniqueName: \"kubernetes.io/projected/49a57c04-2386-4579-996e-664b1b58349d-kube-api-access-fqbb9\") pod \"cinder-db-create-szpbq\" (UID: \"49a57c04-2386-4579-996e-664b1b58349d\") " pod="openstack/cinder-db-create-szpbq" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.875195 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8e347b3-3740-4fff-aca7-d6204175fda4-operator-scripts\") pod \"cinder-3b7d-account-create-update-2ts6f\" (UID: \"e8e347b3-3740-4fff-aca7-d6204175fda4\") " pod="openstack/cinder-3b7d-account-create-update-2ts6f" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.875943 4763 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49a57c04-2386-4579-996e-664b1b58349d-operator-scripts\") pod \"cinder-db-create-szpbq\" (UID: \"49a57c04-2386-4579-996e-664b1b58349d\") " pod="openstack/cinder-db-create-szpbq" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.878743 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-97llg"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.900877 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqbb9\" (UniqueName: \"kubernetes.io/projected/49a57c04-2386-4579-996e-664b1b58349d-kube-api-access-fqbb9\") pod \"cinder-db-create-szpbq\" (UID: \"49a57c04-2386-4579-996e-664b1b58349d\") " pod="openstack/cinder-db-create-szpbq" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.917192 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pc2b\" (UniqueName: \"kubernetes.io/projected/e8e347b3-3740-4fff-aca7-d6204175fda4-kube-api-access-8pc2b\") pod \"cinder-3b7d-account-create-update-2ts6f\" (UID: \"e8e347b3-3740-4fff-aca7-d6204175fda4\") " pod="openstack/cinder-3b7d-account-create-update-2ts6f" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.935488 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.941925 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-j8569"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.947761 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-j8569" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.958256 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-b714-account-create-update-7m2lw"] Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.959840 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b714-account-create-update-7m2lw" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.965478 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.976324 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shdv6\" (UniqueName: \"kubernetes.io/projected/80dd64d4-0026-4111-b2d3-7428956da9ab-kube-api-access-shdv6\") pod \"neutron-db-create-j8569\" (UID: \"80dd64d4-0026-4111-b2d3-7428956da9ab\") " pod="openstack/neutron-db-create-j8569" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.976392 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-config-data\") pod \"keystone-db-sync-97llg\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.976452 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64tjt\" (UniqueName: \"kubernetes.io/projected/c133dec0-ffee-47f1-949f-72aeeed1163c-kube-api-access-64tjt\") pod \"keystone-db-sync-97llg\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.976481 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-combined-ca-bundle\") pod \"keystone-db-sync-97llg\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.976518 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-operator-scripts\") pod \"neutron-b714-account-create-update-7m2lw\" (UID: \"a23d4c9f-a447-4694-8b8f-40c6b7fba10c\") " pod="openstack/neutron-b714-account-create-update-7m2lw" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.976535 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zrdd\" (UniqueName: \"kubernetes.io/projected/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-kube-api-access-4zrdd\") pod \"neutron-b714-account-create-update-7m2lw\" (UID: \"a23d4c9f-a447-4694-8b8f-40c6b7fba10c\") " pod="openstack/neutron-b714-account-create-update-7m2lw" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.976581 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80dd64d4-0026-4111-b2d3-7428956da9ab-operator-scripts\") pod \"neutron-db-create-j8569\" (UID: \"80dd64d4-0026-4111-b2d3-7428956da9ab\") " pod="openstack/neutron-db-create-j8569" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.980637 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-config-data\") pod \"keystone-db-sync-97llg\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.986098 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-combined-ca-bundle\") pod \"keystone-db-sync-97llg\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:20 crc kubenswrapper[4763]: I1206 08:29:20.988855 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-j8569"] Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.022611 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-szpbq" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.040646 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64tjt\" (UniqueName: \"kubernetes.io/projected/c133dec0-ffee-47f1-949f-72aeeed1163c-kube-api-access-64tjt\") pod \"keystone-db-sync-97llg\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.042298 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b714-account-create-update-7m2lw"] Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.059467 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3b7d-account-create-update-2ts6f" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.087325 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80dd64d4-0026-4111-b2d3-7428956da9ab-operator-scripts\") pod \"neutron-db-create-j8569\" (UID: \"80dd64d4-0026-4111-b2d3-7428956da9ab\") " pod="openstack/neutron-db-create-j8569" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.087362 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shdv6\" (UniqueName: \"kubernetes.io/projected/80dd64d4-0026-4111-b2d3-7428956da9ab-kube-api-access-shdv6\") pod \"neutron-db-create-j8569\" (UID: \"80dd64d4-0026-4111-b2d3-7428956da9ab\") " pod="openstack/neutron-db-create-j8569" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.087472 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-operator-scripts\") pod \"neutron-b714-account-create-update-7m2lw\" (UID: \"a23d4c9f-a447-4694-8b8f-40c6b7fba10c\") " pod="openstack/neutron-b714-account-create-update-7m2lw" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.087494 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zrdd\" (UniqueName: \"kubernetes.io/projected/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-kube-api-access-4zrdd\") pod \"neutron-b714-account-create-update-7m2lw\" (UID: \"a23d4c9f-a447-4694-8b8f-40c6b7fba10c\") " pod="openstack/neutron-b714-account-create-update-7m2lw" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.092545 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80dd64d4-0026-4111-b2d3-7428956da9ab-operator-scripts\") pod \"neutron-db-create-j8569\" (UID: \"80dd64d4-0026-4111-b2d3-7428956da9ab\") " pod="openstack/neutron-db-create-j8569" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.092698 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-operator-scripts\") pod \"neutron-b714-account-create-update-7m2lw\" (UID: \"a23d4c9f-a447-4694-8b8f-40c6b7fba10c\") " pod="openstack/neutron-b714-account-create-update-7m2lw" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.096565 4763 generic.go:334] "Generic (PLEG): container finished" podID="e9455395-06d9-4bc9-9ea6-183dd115655c" containerID="26bedac173fd8f4092dde4f4ddb33446080392f51581663f24eb704a015743b6" exitCode=0 Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.096645 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ebdf-account-create-update-ksnmx" event={"ID":"e9455395-06d9-4bc9-9ea6-183dd115655c","Type":"ContainerDied","Data":"26bedac173fd8f4092dde4f4ddb33446080392f51581663f24eb704a015743b6"} Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.114361 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shdv6\" (UniqueName: \"kubernetes.io/projected/80dd64d4-0026-4111-b2d3-7428956da9ab-kube-api-access-shdv6\") pod \"neutron-db-create-j8569\" (UID: \"80dd64d4-0026-4111-b2d3-7428956da9ab\") " pod="openstack/neutron-db-create-j8569" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.129091 4763 generic.go:334] "Generic (PLEG): container finished" podID="05a15474-6053-4ab8-8c89-52d1289f9397" containerID="36bc5390de3a8fbf12d47ec663649b1af12cad35173b670c6fe6e586bc2c536e" exitCode=0 Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.129133 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gb2pf" event={"ID":"05a15474-6053-4ab8-8c89-52d1289f9397","Type":"ContainerDied","Data":"36bc5390de3a8fbf12d47ec663649b1af12cad35173b670c6fe6e586bc2c536e"} Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.131146 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zrdd\" (UniqueName: \"kubernetes.io/projected/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-kube-api-access-4zrdd\") pod \"neutron-b714-account-create-update-7m2lw\" (UID: \"a23d4c9f-a447-4694-8b8f-40c6b7fba10c\") " pod="openstack/neutron-b714-account-create-update-7m2lw" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.256290 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.269345 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-j8569" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.302759 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b714-account-create-update-7m2lw" Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.717550 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-lzmz8"] Dec 06 08:29:21 crc kubenswrapper[4763]: I1206 08:29:21.744823 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-f887-account-create-update-2tnbk"] Dec 06 08:29:21 crc kubenswrapper[4763]: W1206 08:29:21.757629 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd19920e9_3eff_4701_a81c_872cdf0f424c.slice/crio-6cab71936e5079c9f744b0a9b392e69070b3e35a41eb99ce4e05ecd887baf462 WatchSource:0}: Error finding container 6cab71936e5079c9f744b0a9b392e69070b3e35a41eb99ce4e05ecd887baf462: Status 404 returned error can't find the container with id 6cab71936e5079c9f744b0a9b392e69070b3e35a41eb99ce4e05ecd887baf462 Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:21.845416 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-szpbq"] Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:21.860650 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-kr9kp"] Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:21.872562 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3b7d-account-create-update-2ts6f"] Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:21.992271 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b714-account-create-update-7m2lw"] Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.003348 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-j8569"] Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.123285 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-97llg"] Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.265673 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3b7d-account-create-update-2ts6f" event={"ID":"e8e347b3-3740-4fff-aca7-d6204175fda4","Type":"ContainerStarted","Data":"ae616ebccd61b5cb9e2f895e424c341f30e42d8951a856d2ee31fe7770f99c0a"} Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.272430 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-j8569" event={"ID":"80dd64d4-0026-4111-b2d3-7428956da9ab","Type":"ContainerStarted","Data":"4e21f7683ae00f2c14ebf057757b545ad8f65ee135a587bce89a5f88ec68a0d5"} Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.278336 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f887-account-create-update-2tnbk" event={"ID":"d19920e9-3eff-4701-a81c-872cdf0f424c","Type":"ContainerStarted","Data":"875ca722d624a25b1b67554b38e6d2b0f71e4c7fe2c0af499b19dee802a2c068"} Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.278407 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f887-account-create-update-2tnbk" event={"ID":"d19920e9-3eff-4701-a81c-872cdf0f424c","Type":"ContainerStarted","Data":"6cab71936e5079c9f744b0a9b392e69070b3e35a41eb99ce4e05ecd887baf462"} Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.284403 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-kr9kp" event={"ID":"53409f27-f50b-4048-b355-8bc4b6956cf5","Type":"ContainerStarted","Data":"c6a332692eb895a240f6637ec0b7198f1d731f0a4417de62e52d456fc82c4ca0"} Dec 06 
08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.293056 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b714-account-create-update-7m2lw" event={"ID":"a23d4c9f-a447-4694-8b8f-40c6b7fba10c","Type":"ContainerStarted","Data":"0f75b19c54baf9b040e2f19e736d77e73fe288ad0134c5747ed8d47d03b6da1d"} Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.296189 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-szpbq" event={"ID":"49a57c04-2386-4579-996e-664b1b58349d","Type":"ContainerStarted","Data":"694582a852d82ba2e134a7b25c5f72e310ea4b33e5d03102467275af5b9c46ed"} Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.303322 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-lzmz8" event={"ID":"b061ca68-5423-4891-a883-f5ed470789e8","Type":"ContainerStarted","Data":"bcb0f98ba13b6825e0a12be2db7d686986f96caf4184b73b20ffcf7a5c8dd850"} Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.303357 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-lzmz8" event={"ID":"b061ca68-5423-4891-a883-f5ed470789e8","Type":"ContainerStarted","Data":"ef18bb0ec07df8b5bae660cc73e99c502565d317e7f227ea2708ca7e5f583724"} Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.313732 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-f887-account-create-update-2tnbk" podStartSLOduration=2.313711199 podStartE2EDuration="2.313711199s" podCreationTimestamp="2025-12-06 08:29:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:29:22.2981812 +0000 UTC m=+1044.873886238" watchObservedRunningTime="2025-12-06 08:29:22.313711199 +0000 UTC m=+1044.889416227" Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.345707 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-lzmz8" podStartSLOduration=2.345686781 podStartE2EDuration="2.345686781s" podCreationTimestamp="2025-12-06 08:29:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:29:22.34266131 +0000 UTC m=+1044.918366348" watchObservedRunningTime="2025-12-06 08:29:22.345686781 +0000 UTC m=+1044.921391819" Dec 06 08:29:22 crc kubenswrapper[4763]: I1206 08:29:22.357788 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-szpbq" podStartSLOduration=2.357762708 podStartE2EDuration="2.357762708s" podCreationTimestamp="2025-12-06 08:29:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:29:22.315469216 +0000 UTC m=+1044.891174254" watchObservedRunningTime="2025-12-06 08:29:22.357762708 +0000 UTC m=+1044.933467746" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.291305 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gb2pf" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.302542 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-ebdf-account-create-update-ksnmx" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.328089 4763 generic.go:334] "Generic (PLEG): container finished" podID="b061ca68-5423-4891-a883-f5ed470789e8" containerID="bcb0f98ba13b6825e0a12be2db7d686986f96caf4184b73b20ffcf7a5c8dd850" exitCode=0 Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.328176 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-lzmz8" event={"ID":"b061ca68-5423-4891-a883-f5ed470789e8","Type":"ContainerDied","Data":"bcb0f98ba13b6825e0a12be2db7d686986f96caf4184b73b20ffcf7a5c8dd850"} Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.334359 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdwmf\" (UniqueName: \"kubernetes.io/projected/05a15474-6053-4ab8-8c89-52d1289f9397-kube-api-access-pdwmf\") pod \"05a15474-6053-4ab8-8c89-52d1289f9397\" (UID: \"05a15474-6053-4ab8-8c89-52d1289f9397\") " Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.334364 4763 generic.go:334] "Generic (PLEG): container finished" podID="e8e347b3-3740-4fff-aca7-d6204175fda4" containerID="33e3fbf2c5e371b0320c3302d2acca85deae22d856cebd53650ad3bfe9f8606d" exitCode=0 Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.334422 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3b7d-account-create-update-2ts6f" event={"ID":"e8e347b3-3740-4fff-aca7-d6204175fda4","Type":"ContainerDied","Data":"33e3fbf2c5e371b0320c3302d2acca85deae22d856cebd53650ad3bfe9f8606d"} Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.334488 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9455395-06d9-4bc9-9ea6-183dd115655c-operator-scripts\") pod \"e9455395-06d9-4bc9-9ea6-183dd115655c\" (UID: \"e9455395-06d9-4bc9-9ea6-183dd115655c\") " Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.334522 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05a15474-6053-4ab8-8c89-52d1289f9397-operator-scripts\") pod \"05a15474-6053-4ab8-8c89-52d1289f9397\" (UID: \"05a15474-6053-4ab8-8c89-52d1289f9397\") " Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.334574 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kck6f\" (UniqueName: \"kubernetes.io/projected/e9455395-06d9-4bc9-9ea6-183dd115655c-kube-api-access-kck6f\") pod \"e9455395-06d9-4bc9-9ea6-183dd115655c\" (UID: \"e9455395-06d9-4bc9-9ea6-183dd115655c\") " Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.335400 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9455395-06d9-4bc9-9ea6-183dd115655c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e9455395-06d9-4bc9-9ea6-183dd115655c" (UID: "e9455395-06d9-4bc9-9ea6-183dd115655c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.335424 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05a15474-6053-4ab8-8c89-52d1289f9397-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "05a15474-6053-4ab8-8c89-52d1289f9397" (UID: "05a15474-6053-4ab8-8c89-52d1289f9397"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.335784 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9455395-06d9-4bc9-9ea6-183dd115655c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.335808 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05a15474-6053-4ab8-8c89-52d1289f9397-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.340312 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9455395-06d9-4bc9-9ea6-183dd115655c-kube-api-access-kck6f" (OuterVolumeSpecName: "kube-api-access-kck6f") pod "e9455395-06d9-4bc9-9ea6-183dd115655c" (UID: "e9455395-06d9-4bc9-9ea6-183dd115655c"). InnerVolumeSpecName "kube-api-access-kck6f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.344738 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gb2pf" event={"ID":"05a15474-6053-4ab8-8c89-52d1289f9397","Type":"ContainerDied","Data":"db0916064e8c4e34e7b91ea27c5593c6ad7f7bb9040678098859c9e1707b8afc"} Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.344768 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gb2pf" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.344785 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db0916064e8c4e34e7b91ea27c5593c6ad7f7bb9040678098859c9e1707b8afc" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.345134 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05a15474-6053-4ab8-8c89-52d1289f9397-kube-api-access-pdwmf" (OuterVolumeSpecName: "kube-api-access-pdwmf") pod "05a15474-6053-4ab8-8c89-52d1289f9397" (UID: "05a15474-6053-4ab8-8c89-52d1289f9397"). InnerVolumeSpecName "kube-api-access-pdwmf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.346824 4763 generic.go:334] "Generic (PLEG): container finished" podID="80dd64d4-0026-4111-b2d3-7428956da9ab" containerID="6114dfaf83a0d4cdbcc31eeab0b3e4db112aeef5a4206e798cc2639d73e94d57" exitCode=0 Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.346888 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-j8569" event={"ID":"80dd64d4-0026-4111-b2d3-7428956da9ab","Type":"ContainerDied","Data":"6114dfaf83a0d4cdbcc31eeab0b3e4db112aeef5a4206e798cc2639d73e94d57"} Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.348036 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-97llg" event={"ID":"c133dec0-ffee-47f1-949f-72aeeed1163c","Type":"ContainerStarted","Data":"e63d0488a2722fccdc22537f574f3f57ac145d9be704bb60472c9181b10b11d7"} Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.349543 4763 generic.go:334] "Generic (PLEG): container finished" podID="49a57c04-2386-4579-996e-664b1b58349d" containerID="28df4f21ed388517c4df0ec401abe953379ef3fba72984b72f46444d1604e9df" exitCode=0 Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.349592 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-szpbq" event={"ID":"49a57c04-2386-4579-996e-664b1b58349d","Type":"ContainerDied","Data":"28df4f21ed388517c4df0ec401abe953379ef3fba72984b72f46444d1604e9df"} Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.354202 4763 generic.go:334] "Generic (PLEG): container finished" podID="d19920e9-3eff-4701-a81c-872cdf0f424c" containerID="875ca722d624a25b1b67554b38e6d2b0f71e4c7fe2c0af499b19dee802a2c068" exitCode=0 Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.354284 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f887-account-create-update-2tnbk" event={"ID":"d19920e9-3eff-4701-a81c-872cdf0f424c","Type":"ContainerDied","Data":"875ca722d624a25b1b67554b38e6d2b0f71e4c7fe2c0af499b19dee802a2c068"} Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.355879 4763 generic.go:334] "Generic (PLEG): container finished" podID="a23d4c9f-a447-4694-8b8f-40c6b7fba10c" containerID="16ca94014b570767eec8f14789ce7cbb63a7d2e105767b9c7cce4b507446f71b" exitCode=0 Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.355934 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b714-account-create-update-7m2lw" event={"ID":"a23d4c9f-a447-4694-8b8f-40c6b7fba10c","Type":"ContainerDied","Data":"16ca94014b570767eec8f14789ce7cbb63a7d2e105767b9c7cce4b507446f71b"} Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.379796 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ebdf-account-create-update-ksnmx" event={"ID":"e9455395-06d9-4bc9-9ea6-183dd115655c","Type":"ContainerDied","Data":"0fa1a697b77edd1f58e575c00b968f231f8906c23021e3f5911c7551516e5ef7"} Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.379835 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fa1a697b77edd1f58e575c00b968f231f8906c23021e3f5911c7551516e5ef7" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.379918 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-ebdf-account-create-update-ksnmx" Dec 06 08:29:23 crc kubenswrapper[4763]: E1206 08:29:23.406645 4763 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.18:41144->38.102.83.18:37247: write tcp 38.102.83.18:41144->38.102.83.18:37247: write: broken pipe Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.437814 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kck6f\" (UniqueName: \"kubernetes.io/projected/e9455395-06d9-4bc9-9ea6-183dd115655c-kube-api-access-kck6f\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:23 crc kubenswrapper[4763]: I1206 08:29:23.437855 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdwmf\" (UniqueName: \"kubernetes.io/projected/05a15474-6053-4ab8-8c89-52d1289f9397-kube-api-access-pdwmf\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:23 crc kubenswrapper[4763]: E1206 08:29:23.645177 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9455395_06d9_4bc9_9ea6_183dd115655c.slice\": RecentStats: unable to find data in memory cache]" Dec 06 08:29:27 crc kubenswrapper[4763]: I1206 08:29:27.741095 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b714-account-create-update-7m2lw" Dec 06 08:29:27 crc kubenswrapper[4763]: I1206 08:29:27.908411 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zrdd\" (UniqueName: \"kubernetes.io/projected/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-kube-api-access-4zrdd\") pod \"a23d4c9f-a447-4694-8b8f-40c6b7fba10c\" (UID: \"a23d4c9f-a447-4694-8b8f-40c6b7fba10c\") " Dec 06 08:29:27 crc kubenswrapper[4763]: I1206 08:29:27.908511 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-operator-scripts\") pod \"a23d4c9f-a447-4694-8b8f-40c6b7fba10c\" (UID: \"a23d4c9f-a447-4694-8b8f-40c6b7fba10c\") " Dec 06 08:29:27 crc kubenswrapper[4763]: I1206 08:29:27.909654 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a23d4c9f-a447-4694-8b8f-40c6b7fba10c" (UID: "a23d4c9f-a447-4694-8b8f-40c6b7fba10c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:27 crc kubenswrapper[4763]: I1206 08:29:27.921615 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-kube-api-access-4zrdd" (OuterVolumeSpecName: "kube-api-access-4zrdd") pod "a23d4c9f-a447-4694-8b8f-40c6b7fba10c" (UID: "a23d4c9f-a447-4694-8b8f-40c6b7fba10c"). InnerVolumeSpecName "kube-api-access-4zrdd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:28 crc kubenswrapper[4763]: I1206 08:29:28.016351 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zrdd\" (UniqueName: \"kubernetes.io/projected/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-kube-api-access-4zrdd\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:28 crc kubenswrapper[4763]: I1206 08:29:28.016393 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a23d4c9f-a447-4694-8b8f-40c6b7fba10c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:28 crc kubenswrapper[4763]: I1206 08:29:28.423236 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b714-account-create-update-7m2lw" event={"ID":"a23d4c9f-a447-4694-8b8f-40c6b7fba10c","Type":"ContainerDied","Data":"0f75b19c54baf9b040e2f19e736d77e73fe288ad0134c5747ed8d47d03b6da1d"} Dec 06 08:29:28 crc kubenswrapper[4763]: I1206 08:29:28.423281 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f75b19c54baf9b040e2f19e736d77e73fe288ad0134c5747ed8d47d03b6da1d" Dec 06 08:29:28 crc kubenswrapper[4763]: I1206 08:29:28.423367 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b714-account-create-update-7m2lw" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.048215 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-vpjrt"] Dec 06 08:29:29 crc kubenswrapper[4763]: E1206 08:29:29.048891 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9455395-06d9-4bc9-9ea6-183dd115655c" containerName="mariadb-account-create-update" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.050369 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9455395-06d9-4bc9-9ea6-183dd115655c" containerName="mariadb-account-create-update" Dec 06 08:29:29 crc kubenswrapper[4763]: E1206 08:29:29.050390 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05a15474-6053-4ab8-8c89-52d1289f9397" containerName="mariadb-database-create" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.050397 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="05a15474-6053-4ab8-8c89-52d1289f9397" containerName="mariadb-database-create" Dec 06 08:29:29 crc kubenswrapper[4763]: E1206 08:29:29.050414 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a23d4c9f-a447-4694-8b8f-40c6b7fba10c" containerName="mariadb-account-create-update" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.050421 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="a23d4c9f-a447-4694-8b8f-40c6b7fba10c" containerName="mariadb-account-create-update" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.050588 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9455395-06d9-4bc9-9ea6-183dd115655c" containerName="mariadb-account-create-update" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.050603 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="a23d4c9f-a447-4694-8b8f-40c6b7fba10c" containerName="mariadb-account-create-update" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.050612 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="05a15474-6053-4ab8-8c89-52d1289f9397" containerName="mariadb-database-create" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.051222 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.055658 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9rxtc" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.057532 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.070343 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-vpjrt"] Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.137560 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-config-data\") pod \"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.137619 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-combined-ca-bundle\") pod \"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.137671 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-db-sync-config-data\") pod \"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.137716 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjn24\" (UniqueName: \"kubernetes.io/projected/eeb85f24-e43f-4083-a8a2-1d0beebee795-kube-api-access-pjn24\") pod \"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.239249 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-combined-ca-bundle\") pod \"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.239327 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-db-sync-config-data\") pod \"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.239374 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjn24\" (UniqueName: \"kubernetes.io/projected/eeb85f24-e43f-4083-a8a2-1d0beebee795-kube-api-access-pjn24\") pod \"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.239438 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-config-data\") pod 
\"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.245079 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-config-data\") pod \"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.245121 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-db-sync-config-data\") pod \"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.255002 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-combined-ca-bundle\") pod \"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.258794 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjn24\" (UniqueName: \"kubernetes.io/projected/eeb85f24-e43f-4083-a8a2-1d0beebee795-kube-api-access-pjn24\") pod \"glance-db-sync-vpjrt\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:29 crc kubenswrapper[4763]: I1206 08:29:29.370374 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-vpjrt" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.728193 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-j8569" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.744367 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-szpbq" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.747742 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3b7d-account-create-update-2ts6f" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.771871 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f887-account-create-update-2tnbk" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.782552 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-lzmz8" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.883343 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80dd64d4-0026-4111-b2d3-7428956da9ab-operator-scripts\") pod \"80dd64d4-0026-4111-b2d3-7428956da9ab\" (UID: \"80dd64d4-0026-4111-b2d3-7428956da9ab\") " Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.883395 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8e347b3-3740-4fff-aca7-d6204175fda4-operator-scripts\") pod \"e8e347b3-3740-4fff-aca7-d6204175fda4\" (UID: \"e8e347b3-3740-4fff-aca7-d6204175fda4\") " Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.883462 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shdv6\" (UniqueName: \"kubernetes.io/projected/80dd64d4-0026-4111-b2d3-7428956da9ab-kube-api-access-shdv6\") pod \"80dd64d4-0026-4111-b2d3-7428956da9ab\" (UID: \"80dd64d4-0026-4111-b2d3-7428956da9ab\") " Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.883532 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pc2b\" (UniqueName: \"kubernetes.io/projected/e8e347b3-3740-4fff-aca7-d6204175fda4-kube-api-access-8pc2b\") pod \"e8e347b3-3740-4fff-aca7-d6204175fda4\" (UID: \"e8e347b3-3740-4fff-aca7-d6204175fda4\") " Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.883608 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49a57c04-2386-4579-996e-664b1b58349d-operator-scripts\") pod \"49a57c04-2386-4579-996e-664b1b58349d\" (UID: \"49a57c04-2386-4579-996e-664b1b58349d\") " Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.883648 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqbb9\" (UniqueName: \"kubernetes.io/projected/49a57c04-2386-4579-996e-664b1b58349d-kube-api-access-fqbb9\") pod \"49a57c04-2386-4579-996e-664b1b58349d\" (UID: \"49a57c04-2386-4579-996e-664b1b58349d\") " Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.883685 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d19920e9-3eff-4701-a81c-872cdf0f424c-operator-scripts\") pod \"d19920e9-3eff-4701-a81c-872cdf0f424c\" (UID: \"d19920e9-3eff-4701-a81c-872cdf0f424c\") " Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.884212 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49a57c04-2386-4579-996e-664b1b58349d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "49a57c04-2386-4579-996e-664b1b58349d" (UID: "49a57c04-2386-4579-996e-664b1b58349d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.884226 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80dd64d4-0026-4111-b2d3-7428956da9ab-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "80dd64d4-0026-4111-b2d3-7428956da9ab" (UID: "80dd64d4-0026-4111-b2d3-7428956da9ab"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.884389 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d19920e9-3eff-4701-a81c-872cdf0f424c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d19920e9-3eff-4701-a81c-872cdf0f424c" (UID: "d19920e9-3eff-4701-a81c-872cdf0f424c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.884437 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjmbv\" (UniqueName: \"kubernetes.io/projected/d19920e9-3eff-4701-a81c-872cdf0f424c-kube-api-access-jjmbv\") pod \"d19920e9-3eff-4701-a81c-872cdf0f424c\" (UID: \"d19920e9-3eff-4701-a81c-872cdf0f424c\") " Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.884865 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8e347b3-3740-4fff-aca7-d6204175fda4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e8e347b3-3740-4fff-aca7-d6204175fda4" (UID: "e8e347b3-3740-4fff-aca7-d6204175fda4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.885077 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49a57c04-2386-4579-996e-664b1b58349d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.885096 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d19920e9-3eff-4701-a81c-872cdf0f424c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.885152 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80dd64d4-0026-4111-b2d3-7428956da9ab-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.885164 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8e347b3-3740-4fff-aca7-d6204175fda4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.888480 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d19920e9-3eff-4701-a81c-872cdf0f424c-kube-api-access-jjmbv" (OuterVolumeSpecName: "kube-api-access-jjmbv") pod "d19920e9-3eff-4701-a81c-872cdf0f424c" (UID: "d19920e9-3eff-4701-a81c-872cdf0f424c"). InnerVolumeSpecName "kube-api-access-jjmbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.888566 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8e347b3-3740-4fff-aca7-d6204175fda4-kube-api-access-8pc2b" (OuterVolumeSpecName: "kube-api-access-8pc2b") pod "e8e347b3-3740-4fff-aca7-d6204175fda4" (UID: "e8e347b3-3740-4fff-aca7-d6204175fda4"). InnerVolumeSpecName "kube-api-access-8pc2b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.902489 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80dd64d4-0026-4111-b2d3-7428956da9ab-kube-api-access-shdv6" (OuterVolumeSpecName: "kube-api-access-shdv6") pod "80dd64d4-0026-4111-b2d3-7428956da9ab" (UID: "80dd64d4-0026-4111-b2d3-7428956da9ab"). InnerVolumeSpecName "kube-api-access-shdv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.910680 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49a57c04-2386-4579-996e-664b1b58349d-kube-api-access-fqbb9" (OuterVolumeSpecName: "kube-api-access-fqbb9") pod "49a57c04-2386-4579-996e-664b1b58349d" (UID: "49a57c04-2386-4579-996e-664b1b58349d"). InnerVolumeSpecName "kube-api-access-fqbb9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.985855 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b061ca68-5423-4891-a883-f5ed470789e8-operator-scripts\") pod \"b061ca68-5423-4891-a883-f5ed470789e8\" (UID: \"b061ca68-5423-4891-a883-f5ed470789e8\") " Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.985919 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cs4nh\" (UniqueName: \"kubernetes.io/projected/b061ca68-5423-4891-a883-f5ed470789e8-kube-api-access-cs4nh\") pod \"b061ca68-5423-4891-a883-f5ed470789e8\" (UID: \"b061ca68-5423-4891-a883-f5ed470789e8\") " Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.986138 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pc2b\" (UniqueName: \"kubernetes.io/projected/e8e347b3-3740-4fff-aca7-d6204175fda4-kube-api-access-8pc2b\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.986150 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqbb9\" (UniqueName: \"kubernetes.io/projected/49a57c04-2386-4579-996e-664b1b58349d-kube-api-access-fqbb9\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.986159 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjmbv\" (UniqueName: \"kubernetes.io/projected/d19920e9-3eff-4701-a81c-872cdf0f424c-kube-api-access-jjmbv\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.986168 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shdv6\" (UniqueName: \"kubernetes.io/projected/80dd64d4-0026-4111-b2d3-7428956da9ab-kube-api-access-shdv6\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.986358 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b061ca68-5423-4891-a883-f5ed470789e8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b061ca68-5423-4891-a883-f5ed470789e8" (UID: "b061ca68-5423-4891-a883-f5ed470789e8"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:31 crc kubenswrapper[4763]: I1206 08:29:31.991838 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b061ca68-5423-4891-a883-f5ed470789e8-kube-api-access-cs4nh" (OuterVolumeSpecName: "kube-api-access-cs4nh") pod "b061ca68-5423-4891-a883-f5ed470789e8" (UID: "b061ca68-5423-4891-a883-f5ed470789e8"). InnerVolumeSpecName "kube-api-access-cs4nh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.087496 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b061ca68-5423-4891-a883-f5ed470789e8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.087798 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cs4nh\" (UniqueName: \"kubernetes.io/projected/b061ca68-5423-4891-a883-f5ed470789e8-kube-api-access-cs4nh\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.453687 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-szpbq" event={"ID":"49a57c04-2386-4579-996e-664b1b58349d","Type":"ContainerDied","Data":"694582a852d82ba2e134a7b25c5f72e310ea4b33e5d03102467275af5b9c46ed"} Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.453981 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="694582a852d82ba2e134a7b25c5f72e310ea4b33e5d03102467275af5b9c46ed" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.453715 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-szpbq" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.455045 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-lzmz8" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.455048 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-lzmz8" event={"ID":"b061ca68-5423-4891-a883-f5ed470789e8","Type":"ContainerDied","Data":"ef18bb0ec07df8b5bae660cc73e99c502565d317e7f227ea2708ca7e5f583724"} Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.455189 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef18bb0ec07df8b5bae660cc73e99c502565d317e7f227ea2708ca7e5f583724" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.456295 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3b7d-account-create-update-2ts6f" event={"ID":"e8e347b3-3740-4fff-aca7-d6204175fda4","Type":"ContainerDied","Data":"ae616ebccd61b5cb9e2f895e424c341f30e42d8951a856d2ee31fe7770f99c0a"} Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.456333 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae616ebccd61b5cb9e2f895e424c341f30e42d8951a856d2ee31fe7770f99c0a" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.456307 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-3b7d-account-create-update-2ts6f" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.457545 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-j8569" event={"ID":"80dd64d4-0026-4111-b2d3-7428956da9ab","Type":"ContainerDied","Data":"4e21f7683ae00f2c14ebf057757b545ad8f65ee135a587bce89a5f88ec68a0d5"} Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.457570 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e21f7683ae00f2c14ebf057757b545ad8f65ee135a587bce89a5f88ec68a0d5" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.457614 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-j8569" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.464089 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f887-account-create-update-2tnbk" event={"ID":"d19920e9-3eff-4701-a81c-872cdf0f424c","Type":"ContainerDied","Data":"6cab71936e5079c9f744b0a9b392e69070b3e35a41eb99ce4e05ecd887baf462"} Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.464140 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cab71936e5079c9f744b0a9b392e69070b3e35a41eb99ce4e05ecd887baf462" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.464097 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f887-account-create-update-2tnbk" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.465508 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-kr9kp" event={"ID":"53409f27-f50b-4048-b355-8bc4b6956cf5","Type":"ContainerStarted","Data":"717ac97dc903c4c6956bfd29cc90915c7937bdeff1a5b2d435f9abc4f3d71af2"} Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.467391 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-97llg" event={"ID":"c133dec0-ffee-47f1-949f-72aeeed1163c","Type":"ContainerStarted","Data":"3e182e52bd93fcf2a9ca40dd38bdfa54e673289b70a059f736f486687788006f"} Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.482378 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-sync-kr9kp" podStartSLOduration=2.129446563 podStartE2EDuration="12.482318217s" podCreationTimestamp="2025-12-06 08:29:20 +0000 UTC" firstStartedPulling="2025-12-06 08:29:21.874609021 +0000 UTC m=+1044.450314059" lastFinishedPulling="2025-12-06 08:29:32.227480675 +0000 UTC m=+1054.803185713" observedRunningTime="2025-12-06 08:29:32.479596013 +0000 UTC m=+1055.055301061" watchObservedRunningTime="2025-12-06 08:29:32.482318217 +0000 UTC m=+1055.058023255" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.502233 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-97llg" podStartSLOduration=2.601329086 podStartE2EDuration="12.502214834s" podCreationTimestamp="2025-12-06 08:29:20 +0000 UTC" firstStartedPulling="2025-12-06 08:29:22.263613106 +0000 UTC m=+1044.839318144" lastFinishedPulling="2025-12-06 08:29:32.164498664 +0000 UTC m=+1054.740203892" observedRunningTime="2025-12-06 08:29:32.495376669 +0000 UTC m=+1055.071081717" watchObservedRunningTime="2025-12-06 08:29:32.502214834 +0000 UTC m=+1055.077919862" Dec 06 08:29:32 crc kubenswrapper[4763]: I1206 08:29:32.635047 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-vpjrt"] Dec 
06 08:29:32 crc kubenswrapper[4763]: W1206 08:29:32.639689 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeeb85f24_e43f_4083_a8a2_1d0beebee795.slice/crio-b38c472fa9f7edb8b26bd96737c55b4d1660847813bfea2334cd10f9d076ce72 WatchSource:0}: Error finding container b38c472fa9f7edb8b26bd96737c55b4d1660847813bfea2334cd10f9d076ce72: Status 404 returned error can't find the container with id b38c472fa9f7edb8b26bd96737c55b4d1660847813bfea2334cd10f9d076ce72 Dec 06 08:29:33 crc kubenswrapper[4763]: I1206 08:29:33.477468 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vpjrt" event={"ID":"eeb85f24-e43f-4083-a8a2-1d0beebee795","Type":"ContainerStarted","Data":"b38c472fa9f7edb8b26bd96737c55b4d1660847813bfea2334cd10f9d076ce72"} Dec 06 08:29:36 crc kubenswrapper[4763]: I1206 08:29:36.513184 4763 generic.go:334] "Generic (PLEG): container finished" podID="53409f27-f50b-4048-b355-8bc4b6956cf5" containerID="717ac97dc903c4c6956bfd29cc90915c7937bdeff1a5b2d435f9abc4f3d71af2" exitCode=0 Dec 06 08:29:36 crc kubenswrapper[4763]: I1206 08:29:36.513279 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-kr9kp" event={"ID":"53409f27-f50b-4048-b355-8bc4b6956cf5","Type":"ContainerDied","Data":"717ac97dc903c4c6956bfd29cc90915c7937bdeff1a5b2d435f9abc4f3d71af2"} Dec 06 08:29:37 crc kubenswrapper[4763]: I1206 08:29:37.523793 4763 generic.go:334] "Generic (PLEG): container finished" podID="c133dec0-ffee-47f1-949f-72aeeed1163c" containerID="3e182e52bd93fcf2a9ca40dd38bdfa54e673289b70a059f736f486687788006f" exitCode=0 Dec 06 08:29:37 crc kubenswrapper[4763]: I1206 08:29:37.524039 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-97llg" event={"ID":"c133dec0-ffee-47f1-949f-72aeeed1163c","Type":"ContainerDied","Data":"3e182e52bd93fcf2a9ca40dd38bdfa54e673289b70a059f736f486687788006f"} Dec 06 08:29:43 crc kubenswrapper[4763]: I1206 08:29:43.980696 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.001858 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.035436 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-config-data\") pod \"53409f27-f50b-4048-b355-8bc4b6956cf5\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.035528 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-combined-ca-bundle\") pod \"c133dec0-ffee-47f1-949f-72aeeed1163c\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.059148 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c133dec0-ffee-47f1-949f-72aeeed1163c" (UID: "c133dec0-ffee-47f1-949f-72aeeed1163c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.088762 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-config-data" (OuterVolumeSpecName: "config-data") pod "53409f27-f50b-4048-b355-8bc4b6956cf5" (UID: "53409f27-f50b-4048-b355-8bc4b6956cf5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.136983 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-db-sync-config-data\") pod \"53409f27-f50b-4048-b355-8bc4b6956cf5\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.137105 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-combined-ca-bundle\") pod \"53409f27-f50b-4048-b355-8bc4b6956cf5\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.137383 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rtl7\" (UniqueName: \"kubernetes.io/projected/53409f27-f50b-4048-b355-8bc4b6956cf5-kube-api-access-4rtl7\") pod \"53409f27-f50b-4048-b355-8bc4b6956cf5\" (UID: \"53409f27-f50b-4048-b355-8bc4b6956cf5\") " Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.137415 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-config-data\") pod \"c133dec0-ffee-47f1-949f-72aeeed1163c\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.137442 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64tjt\" (UniqueName: \"kubernetes.io/projected/c133dec0-ffee-47f1-949f-72aeeed1163c-kube-api-access-64tjt\") pod \"c133dec0-ffee-47f1-949f-72aeeed1163c\" (UID: \"c133dec0-ffee-47f1-949f-72aeeed1163c\") " Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.138328 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.138527 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.141470 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "53409f27-f50b-4048-b355-8bc4b6956cf5" (UID: "53409f27-f50b-4048-b355-8bc4b6956cf5"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.142078 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53409f27-f50b-4048-b355-8bc4b6956cf5-kube-api-access-4rtl7" (OuterVolumeSpecName: "kube-api-access-4rtl7") pod "53409f27-f50b-4048-b355-8bc4b6956cf5" (UID: "53409f27-f50b-4048-b355-8bc4b6956cf5"). InnerVolumeSpecName "kube-api-access-4rtl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.142142 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c133dec0-ffee-47f1-949f-72aeeed1163c-kube-api-access-64tjt" (OuterVolumeSpecName: "kube-api-access-64tjt") pod "c133dec0-ffee-47f1-949f-72aeeed1163c" (UID: "c133dec0-ffee-47f1-949f-72aeeed1163c"). InnerVolumeSpecName "kube-api-access-64tjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.171615 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53409f27-f50b-4048-b355-8bc4b6956cf5" (UID: "53409f27-f50b-4048-b355-8bc4b6956cf5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.190212 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-config-data" (OuterVolumeSpecName: "config-data") pod "c133dec0-ffee-47f1-949f-72aeeed1163c" (UID: "c133dec0-ffee-47f1-949f-72aeeed1163c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.240256 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rtl7\" (UniqueName: \"kubernetes.io/projected/53409f27-f50b-4048-b355-8bc4b6956cf5-kube-api-access-4rtl7\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.240297 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c133dec0-ffee-47f1-949f-72aeeed1163c-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.240309 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64tjt\" (UniqueName: \"kubernetes.io/projected/c133dec0-ffee-47f1-949f-72aeeed1163c-kube-api-access-64tjt\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.240324 4763 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.240336 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53409f27-f50b-4048-b355-8bc4b6956cf5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.580832 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-97llg" event={"ID":"c133dec0-ffee-47f1-949f-72aeeed1163c","Type":"ContainerDied","Data":"e63d0488a2722fccdc22537f574f3f57ac145d9be704bb60472c9181b10b11d7"} Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.581145 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e63d0488a2722fccdc22537f574f3f57ac145d9be704bb60472c9181b10b11d7" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.580869 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-97llg" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.582782 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vpjrt" event={"ID":"eeb85f24-e43f-4083-a8a2-1d0beebee795","Type":"ContainerStarted","Data":"94ab73bf2720f4f7927f40391b6d98aea89d4f03fe83e1c60d1ab87047875f54"} Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.584181 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-kr9kp" event={"ID":"53409f27-f50b-4048-b355-8bc4b6956cf5","Type":"ContainerDied","Data":"c6a332692eb895a240f6637ec0b7198f1d731f0a4417de62e52d456fc82c4ca0"} Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.584202 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6a332692eb895a240f6637ec0b7198f1d731f0a4417de62e52d456fc82c4ca0" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.584208 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-kr9kp" Dec 06 08:29:44 crc kubenswrapper[4763]: I1206 08:29:44.608316 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-vpjrt" podStartSLOduration=4.39270415 podStartE2EDuration="15.608300732s" podCreationTimestamp="2025-12-06 08:29:29 +0000 UTC" firstStartedPulling="2025-12-06 08:29:32.642244745 +0000 UTC m=+1055.217949783" lastFinishedPulling="2025-12-06 08:29:43.857841327 +0000 UTC m=+1066.433546365" observedRunningTime="2025-12-06 08:29:44.604047067 +0000 UTC m=+1067.179752105" watchObservedRunningTime="2025-12-06 08:29:44.608300732 +0000 UTC m=+1067.184005770" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.194933 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68b58f878c-gnm7j"] Dec 06 08:29:45 crc kubenswrapper[4763]: E1206 08:29:45.195374 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80dd64d4-0026-4111-b2d3-7428956da9ab" containerName="mariadb-database-create" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195392 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="80dd64d4-0026-4111-b2d3-7428956da9ab" containerName="mariadb-database-create" Dec 06 08:29:45 crc kubenswrapper[4763]: E1206 08:29:45.195407 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8e347b3-3740-4fff-aca7-d6204175fda4" containerName="mariadb-account-create-update" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195415 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8e347b3-3740-4fff-aca7-d6204175fda4" containerName="mariadb-account-create-update" Dec 06 08:29:45 crc kubenswrapper[4763]: E1206 08:29:45.195442 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c133dec0-ffee-47f1-949f-72aeeed1163c" containerName="keystone-db-sync" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195451 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="c133dec0-ffee-47f1-949f-72aeeed1163c" containerName="keystone-db-sync" Dec 06 08:29:45 crc kubenswrapper[4763]: E1206 08:29:45.195470 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53409f27-f50b-4048-b355-8bc4b6956cf5" containerName="watcher-db-sync" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195478 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="53409f27-f50b-4048-b355-8bc4b6956cf5" containerName="watcher-db-sync" Dec 06 08:29:45 crc kubenswrapper[4763]: E1206 08:29:45.195493 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49a57c04-2386-4579-996e-664b1b58349d" containerName="mariadb-database-create" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195508 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="49a57c04-2386-4579-996e-664b1b58349d" containerName="mariadb-database-create" Dec 06 08:29:45 crc kubenswrapper[4763]: E1206 08:29:45.195529 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d19920e9-3eff-4701-a81c-872cdf0f424c" containerName="mariadb-account-create-update" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195536 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d19920e9-3eff-4701-a81c-872cdf0f424c" containerName="mariadb-account-create-update" Dec 06 08:29:45 crc kubenswrapper[4763]: E1206 08:29:45.195558 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b061ca68-5423-4891-a883-f5ed470789e8" containerName="mariadb-database-create" Dec 06 08:29:45 crc 
kubenswrapper[4763]: I1206 08:29:45.195566 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="b061ca68-5423-4891-a883-f5ed470789e8" containerName="mariadb-database-create" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195781 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="b061ca68-5423-4891-a883-f5ed470789e8" containerName="mariadb-database-create" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195798 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="c133dec0-ffee-47f1-949f-72aeeed1163c" containerName="keystone-db-sync" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195822 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="80dd64d4-0026-4111-b2d3-7428956da9ab" containerName="mariadb-database-create" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195836 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="53409f27-f50b-4048-b355-8bc4b6956cf5" containerName="watcher-db-sync" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195850 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="d19920e9-3eff-4701-a81c-872cdf0f424c" containerName="mariadb-account-create-update" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195866 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8e347b3-3740-4fff-aca7-d6204175fda4" containerName="mariadb-account-create-update" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.195879 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="49a57c04-2386-4579-996e-664b1b58349d" containerName="mariadb-database-create" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.197121 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.208635 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-87d8l"] Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.210091 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.214339 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.214611 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.214960 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.214993 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gm8vr" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.215279 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.219765 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-87d8l"] Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.251153 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68b58f878c-gnm7j"] Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258596 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-config-data\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258651 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-combined-ca-bundle\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258676 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-sb\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258697 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-config\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258719 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdprw\" (UniqueName: \"kubernetes.io/projected/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-kube-api-access-kdprw\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258754 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-svc\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: 
\"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258772 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-swift-storage-0\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258799 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-fernet-keys\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258834 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dldck\" (UniqueName: \"kubernetes.io/projected/4fc77560-e6e3-4cd9-bdd0-095aefb31625-kube-api-access-dldck\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258862 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-scripts\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258877 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-nb\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.258916 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-credential-keys\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361259 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-scripts\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361339 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-nb\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361391 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-credential-keys\") pod \"keystone-bootstrap-87d8l\" 
(UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361462 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-config-data\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361501 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-combined-ca-bundle\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361529 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-sb\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361564 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-config\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361601 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdprw\" (UniqueName: \"kubernetes.io/projected/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-kube-api-access-kdprw\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361668 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-svc\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361706 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-swift-storage-0\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361758 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-fernet-keys\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.361822 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dldck\" (UniqueName: \"kubernetes.io/projected/4fc77560-e6e3-4cd9-bdd0-095aefb31625-kube-api-access-dldck\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" 
Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.365799 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-sb\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.367828 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-svc\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.368518 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-config\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.369839 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-nb\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.377734 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-swift-storage-0\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.390475 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-scripts\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.391759 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-config-data\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.403607 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-fernet-keys\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.416660 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-combined-ca-bundle\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.425864 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-credential-keys\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.432089 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdprw\" (UniqueName: \"kubernetes.io/projected/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-kube-api-access-kdprw\") pod \"keystone-bootstrap-87d8l\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.453305 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dldck\" (UniqueName: \"kubernetes.io/projected/4fc77560-e6e3-4cd9-bdd0-095aefb31625-kube-api-access-dldck\") pod \"dnsmasq-dns-68b58f878c-gnm7j\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.534308 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.546365 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.615524 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-888r5"] Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.617214 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.640662 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.640967 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-6r692" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.641098 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.694768 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-888r5"] Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.782851 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-config-data\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.783189 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-combined-ca-bundle\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.783221 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4f62869c-d491-4a12-a88c-1a58ef5b1bea-etc-machine-id\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 
08:29:45.783240 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-db-sync-config-data\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.783314 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-scripts\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.783334 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t54lp\" (UniqueName: \"kubernetes.io/projected/4f62869c-d491-4a12-a88c-1a58ef5b1bea-kube-api-access-t54lp\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.787025 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.789784 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.805660 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-tgmcp" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.805977 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.821973 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-8476f8b67c-mpk68"] Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.826729 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.847030 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-2j26g"] Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.848698 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-2j26g" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.884548 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-config-data\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.884593 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-config-data\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.884619 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-combined-ca-bundle\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.884641 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4f62869c-d491-4a12-a88c-1a58ef5b1bea-etc-machine-id\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.884657 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1add731-e870-4d5e-84fb-0c6f15a86916-logs\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.884675 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-db-sync-config-data\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.884693 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mslkv\" (UniqueName: \"kubernetes.io/projected/b1add731-e870-4d5e-84fb-0c6f15a86916-kube-api-access-mslkv\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.884716 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.884752 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.884781 4763 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-scripts\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.884798 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t54lp\" (UniqueName: \"kubernetes.io/projected/4f62869c-d491-4a12-a88c-1a58ef5b1bea-kube-api-access-t54lp\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.885636 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4f62869c-d491-4a12-a88c-1a58ef5b1bea-etc-machine-id\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.908513 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-config-data\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.908827 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-scripts\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.911042 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-combined-ca-bundle\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.929056 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-k6n7z" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.929165 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.929655 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-mfl2g" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.929836 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.930041 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.934408 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.951007 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-db-sync-config-data\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.957681 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-t54lp\" (UniqueName: \"kubernetes.io/projected/4f62869c-d491-4a12-a88c-1a58ef5b1bea-kube-api-access-t54lp\") pod \"cinder-db-sync-888r5\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.957710 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.982662 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-2j26g"] Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990032 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrswr\" (UniqueName: \"kubernetes.io/projected/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-kube-api-access-wrswr\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990098 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-config-data\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990120 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-combined-ca-bundle\") pod \"neutron-db-sync-2j26g\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " pod="openstack/neutron-db-sync-2j26g" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990167 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdzmq\" (UniqueName: \"kubernetes.io/projected/e6214296-e09d-4c7a-a0ec-2d232793129f-kube-api-access-rdzmq\") pod \"neutron-db-sync-2j26g\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " pod="openstack/neutron-db-sync-2j26g" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990214 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-config-data\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990249 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-scripts\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990268 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-config\") pod \"neutron-db-sync-2j26g\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " pod="openstack/neutron-db-sync-2j26g" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990287 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-horizon-secret-key\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990334 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1add731-e870-4d5e-84fb-0c6f15a86916-logs\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990353 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mslkv\" (UniqueName: \"kubernetes.io/projected/b1add731-e870-4d5e-84fb-0c6f15a86916-kube-api-access-mslkv\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990393 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990423 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.990443 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-logs\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.991511 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1add731-e870-4d5e-84fb-0c6f15a86916-logs\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:45 crc kubenswrapper[4763]: I1206 08:29:45.997602 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.014216 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.033020 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-config-data\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.058459 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/watcher-api-0"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.065491 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mslkv\" (UniqueName: \"kubernetes.io/projected/b1add731-e870-4d5e-84fb-0c6f15a86916-kube-api-access-mslkv\") pod \"watcher-api-0\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " pod="openstack/watcher-api-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.083232 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8476f8b67c-mpk68"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.091525 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrswr\" (UniqueName: \"kubernetes.io/projected/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-kube-api-access-wrswr\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.091571 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-config-data\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.091595 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-combined-ca-bundle\") pod \"neutron-db-sync-2j26g\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " pod="openstack/neutron-db-sync-2j26g" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.091627 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdzmq\" (UniqueName: \"kubernetes.io/projected/e6214296-e09d-4c7a-a0ec-2d232793129f-kube-api-access-rdzmq\") pod \"neutron-db-sync-2j26g\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " pod="openstack/neutron-db-sync-2j26g" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.091672 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-scripts\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.091692 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-config\") pod \"neutron-db-sync-2j26g\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " pod="openstack/neutron-db-sync-2j26g" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.091709 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-horizon-secret-key\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.091761 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-logs\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " 
pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.092371 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-logs\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.093128 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-scripts\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.094049 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-config-data\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.102414 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-combined-ca-bundle\") pod \"neutron-db-sync-2j26g\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " pod="openstack/neutron-db-sync-2j26g" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.105712 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-horizon-secret-key\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.106047 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-config\") pod \"neutron-db-sync-2j26g\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " pod="openstack/neutron-db-sync-2j26g" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.126105 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.127263 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.136619 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.151884 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.152675 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrswr\" (UniqueName: \"kubernetes.io/projected/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-kube-api-access-wrswr\") pod \"horizon-8476f8b67c-mpk68\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.162168 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdzmq\" (UniqueName: \"kubernetes.io/projected/e6214296-e09d-4c7a-a0ec-2d232793129f-kube-api-access-rdzmq\") pod \"neutron-db-sync-2j26g\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " pod="openstack/neutron-db-sync-2j26g" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.168456 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.190252 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2j26g" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.197988 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7731d4cb-7569-4783-842d-acef9e33cb50-logs\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.198047 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.198074 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9tk7\" (UniqueName: \"kubernetes.io/projected/7731d4cb-7569-4783-842d-acef9e33cb50-kube-api-access-x9tk7\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.198102 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.198138 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-config-data\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.241288 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-888r5" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.246050 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.299969 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7731d4cb-7569-4783-842d-acef9e33cb50-logs\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.300224 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.300336 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9tk7\" (UniqueName: \"kubernetes.io/projected/7731d4cb-7569-4783-842d-acef9e33cb50-kube-api-access-x9tk7\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.300424 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.300507 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-config-data\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.308754 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7731d4cb-7569-4783-842d-acef9e33cb50-logs\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.329833 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.331023 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-config-data\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.331600 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.332733 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9tk7\" 
(UniqueName: \"kubernetes.io/projected/7731d4cb-7569-4783-842d-acef9e33cb50-kube-api-access-x9tk7\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.333215 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.334971 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.356383 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.356645 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.372682 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-f8hjt"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.373780 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.376009 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.383753 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-x7jlx" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.384218 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.411149 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-log-httpd\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.411191 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnpcv\" (UniqueName: \"kubernetes.io/projected/036fdcea-9f9b-44fe-917e-4b8f8903fe48-kube-api-access-mnpcv\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.411222 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-scripts\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.411239 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-config-data\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.411536 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-run-httpd\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.411600 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.411895 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.431573 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.433077 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.435502 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-f8hjt"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.440270 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.473953 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68b58f878c-gnm7j"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.500304 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514092 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68hnx\" (UniqueName: \"kubernetes.io/projected/27e8b45c-35a8-4407-849b-774bd681bf75-kube-api-access-68hnx\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514136 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjcdp\" (UniqueName: \"kubernetes.io/projected/d29048c3-9081-403e-80a2-bd13ee959417-kube-api-access-vjcdp\") pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514167 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514205 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " 
pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514225 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d29048c3-9081-403e-80a2-bd13ee959417-logs\") pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514241 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-config-data\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514276 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-log-httpd\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514303 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnpcv\" (UniqueName: \"kubernetes.io/projected/036fdcea-9f9b-44fe-917e-4b8f8903fe48-kube-api-access-mnpcv\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514335 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-scripts\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514355 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-config-data\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514373 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-scripts\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514404 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-combined-ca-bundle\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514436 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-run-httpd\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514453 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514474 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27e8b45c-35a8-4407-849b-774bd681bf75-logs\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.514510 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-config-data\") pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.516794 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.517340 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-log-httpd\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.521626 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-config-data\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.526157 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.526396 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-run-httpd\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.527040 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.530570 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.530645 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-scripts\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.533833 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnpcv\" (UniqueName: \"kubernetes.io/projected/036fdcea-9f9b-44fe-917e-4b8f8903fe48-kube-api-access-mnpcv\") pod \"ceilometer-0\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.542729 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-74dc665cdf-b5fpf"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.544324 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.562800 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-74dc665cdf-b5fpf"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.572966 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-jffbq"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.574708 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jffbq" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.609780 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jffbq"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.633524 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q9q9\" (UniqueName: \"kubernetes.io/projected/8cb11399-f2d8-48d4-83a0-5569bacb0b10-kube-api-access-7q9q9\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.633595 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-scripts\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.633665 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-scripts\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.633734 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-combined-ca-bundle\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.633829 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27e8b45c-35a8-4407-849b-774bd681bf75-logs\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.633924 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-config-data\") pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.633986 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cb11399-f2d8-48d4-83a0-5569bacb0b10-horizon-secret-key\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.634019 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68hnx\" (UniqueName: \"kubernetes.io/projected/27e8b45c-35a8-4407-849b-774bd681bf75-kube-api-access-68hnx\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.634050 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjcdp\" (UniqueName: \"kubernetes.io/projected/d29048c3-9081-403e-80a2-bd13ee959417-kube-api-access-vjcdp\") 
pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.634110 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cb11399-f2d8-48d4-83a0-5569bacb0b10-logs\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.634149 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-config-data\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.634180 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.634209 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d29048c3-9081-403e-80a2-bd13ee959417-logs\") pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.634296 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-config-data\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.648047 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-hrt54" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.648428 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.650330 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27e8b45c-35a8-4407-849b-774bd681bf75-logs\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.651241 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d29048c3-9081-403e-80a2-bd13ee959417-logs\") pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.657277 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-combined-ca-bundle\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.658789 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.659965 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" event={"ID":"4fc77560-e6e3-4cd9-bdd0-095aefb31625","Type":"ContainerStarted","Data":"1885851b8ae3d27d6c1cbc3c2ca4e737bfdddceb73bc4657949f58063d3ad690"} Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.662731 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-scripts\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.670445 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-config-data\") pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.683849 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-656f7475df-b6hdj"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.691191 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjcdp\" (UniqueName: \"kubernetes.io/projected/d29048c3-9081-403e-80a2-bd13ee959417-kube-api-access-vjcdp\") pod \"watcher-applier-0\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.693463 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-config-data\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.696113 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.717337 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68hnx\" (UniqueName: \"kubernetes.io/projected/27e8b45c-35a8-4407-849b-774bd681bf75-kube-api-access-68hnx\") pod \"placement-db-sync-f8hjt\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.723118 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.736289 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q9q9\" (UniqueName: \"kubernetes.io/projected/8cb11399-f2d8-48d4-83a0-5569bacb0b10-kube-api-access-7q9q9\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.736350 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-scripts\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.736441 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-combined-ca-bundle\") pod \"barbican-db-sync-jffbq\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " pod="openstack/barbican-db-sync-jffbq" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.736528 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cb11399-f2d8-48d4-83a0-5569bacb0b10-horizon-secret-key\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.736572 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-db-sync-config-data\") pod \"barbican-db-sync-jffbq\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " pod="openstack/barbican-db-sync-jffbq" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.736608 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cb11399-f2d8-48d4-83a0-5569bacb0b10-logs\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.736635 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-config-data\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.736679 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz4jc\" (UniqueName: \"kubernetes.io/projected/e5015508-305d-4f07-a137-85149d98f662-kube-api-access-qz4jc\") pod \"barbican-db-sync-jffbq\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " pod="openstack/barbican-db-sync-jffbq" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.742649 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cb11399-f2d8-48d4-83a0-5569bacb0b10-logs\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 
08:29:46.745236 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-config-data\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.746071 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-scripts\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.748286 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cb11399-f2d8-48d4-83a0-5569bacb0b10-horizon-secret-key\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.751858 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-f8hjt" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.760767 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-656f7475df-b6hdj"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.771127 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q9q9\" (UniqueName: \"kubernetes.io/projected/8cb11399-f2d8-48d4-83a0-5569bacb0b10-kube-api-access-7q9q9\") pod \"horizon-74dc665cdf-b5fpf\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.812990 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.838846 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb6bs\" (UniqueName: \"kubernetes.io/projected/9c55cdaa-1dd9-4c6e-937e-da63410a649d-kube-api-access-hb6bs\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.839733 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-combined-ca-bundle\") pod \"barbican-db-sync-jffbq\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " pod="openstack/barbican-db-sync-jffbq" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.844052 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68b58f878c-gnm7j"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.851072 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-sb\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.851129 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-db-sync-config-data\") pod \"barbican-db-sync-jffbq\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " pod="openstack/barbican-db-sync-jffbq" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.851267 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-swift-storage-0\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.851290 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-config\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.851322 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz4jc\" (UniqueName: \"kubernetes.io/projected/e5015508-305d-4f07-a137-85149d98f662-kube-api-access-qz4jc\") pod \"barbican-db-sync-jffbq\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " pod="openstack/barbican-db-sync-jffbq" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.853083 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-nb\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.853273 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-svc\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.866710 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-combined-ca-bundle\") pod \"barbican-db-sync-jffbq\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " pod="openstack/barbican-db-sync-jffbq" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.879812 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz4jc\" (UniqueName: \"kubernetes.io/projected/e5015508-305d-4f07-a137-85149d98f662-kube-api-access-qz4jc\") pod \"barbican-db-sync-jffbq\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " pod="openstack/barbican-db-sync-jffbq" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.895145 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-db-sync-config-data\") pod \"barbican-db-sync-jffbq\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " pod="openstack/barbican-db-sync-jffbq" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.919417 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.954455 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-sb\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.954537 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-swift-storage-0\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.954558 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-config\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.954587 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-nb\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.954622 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-svc\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 
crc kubenswrapper[4763]: I1206 08:29:46.954666 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb6bs\" (UniqueName: \"kubernetes.io/projected/9c55cdaa-1dd9-4c6e-937e-da63410a649d-kube-api-access-hb6bs\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.955399 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-sb\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.955789 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-swift-storage-0\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.957212 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-config\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.955798 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jffbq" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.961635 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-nb\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.962210 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-svc\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.981872 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-87d8l"] Dec 06 08:29:46 crc kubenswrapper[4763]: I1206 08:29:46.988099 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb6bs\" (UniqueName: \"kubernetes.io/projected/9c55cdaa-1dd9-4c6e-937e-da63410a649d-kube-api-access-hb6bs\") pod \"dnsmasq-dns-656f7475df-b6hdj\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.194878 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.319422 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8476f8b67c-mpk68"] Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.327969 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.459075 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-888r5"] Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.469124 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-2j26g"] Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.677122 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.681659 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8476f8b67c-mpk68" event={"ID":"c9213712-3273-4ab8-8810-f6f1d4ef8fbc","Type":"ContainerStarted","Data":"1ebcd0ed9280d953abce94e16520805d71f02cacf26d447513f4c64282c6adc6"} Dec 06 08:29:47 crc kubenswrapper[4763]: W1206 08:29:47.682575 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd29048c3_9081_403e_80a2_bd13ee959417.slice/crio-9aa3f9622ffa62321e7fb8e4a5ac9127e46b7b9220081f91987bec1dab5739b9 WatchSource:0}: Error finding container 9aa3f9622ffa62321e7fb8e4a5ac9127e46b7b9220081f91987bec1dab5739b9: Status 404 returned error can't find the container with id 9aa3f9622ffa62321e7fb8e4a5ac9127e46b7b9220081f91987bec1dab5739b9 Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.683149 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-87d8l" event={"ID":"b88367cd-b824-4e91-9d5c-66b9e06d6bf2","Type":"ContainerStarted","Data":"8625d0d2b13ea835dce03f9d6a0c4ad07371d5db1b8fa4fef8d60e8462edf6b1"} Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.689255 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-888r5" event={"ID":"4f62869c-d491-4a12-a88c-1a58ef5b1bea","Type":"ContainerStarted","Data":"f5080d9cd503bbe606ccecb73fa65ab599c2acaec0cc787aeded55772a090776"} Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.696807 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2j26g" event={"ID":"e6214296-e09d-4c7a-a0ec-2d232793129f","Type":"ContainerStarted","Data":"461aaaa9644b7c60324c5d7521104f94601037e78ebe0fae901fdaabba0ee424"} Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.699432 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.714164 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.754510 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-f8hjt"] Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.754650 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b1add731-e870-4d5e-84fb-0c6f15a86916","Type":"ContainerStarted","Data":"9cb21d9d331b2f393bc1d94dde190d86b1ba653b07b6cc1a0c4856d709f440d5"} Dec 06 08:29:47 crc kubenswrapper[4763]: I1206 08:29:47.954578 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jffbq"] Dec 
06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.050141 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-74dc665cdf-b5fpf"] Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.444440 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-656f7475df-b6hdj"] Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.491939 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.508962 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-8476f8b67c-mpk68"] Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.576711 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.609026 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-c56f8d849-s7cf9"] Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.610461 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.667516 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c56f8d849-s7cf9"] Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.722609 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-logs\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.723018 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-horizon-secret-key\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.723074 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-scripts\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.723170 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g65r\" (UniqueName: \"kubernetes.io/projected/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-kube-api-access-7g65r\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.723197 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-config-data\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.745609 4763 generic.go:334] "Generic (PLEG): container finished" podID="4fc77560-e6e3-4cd9-bdd0-095aefb31625" containerID="c60af3fea30551eefaa5559531865989ce57aac9e5c6895d5e214422d46b8e0c" exitCode=0 Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 
08:29:48.746050 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" event={"ID":"4fc77560-e6e3-4cd9-bdd0-095aefb31625","Type":"ContainerDied","Data":"c60af3fea30551eefaa5559531865989ce57aac9e5c6895d5e214422d46b8e0c"} Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.756345 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-87d8l" event={"ID":"b88367cd-b824-4e91-9d5c-66b9e06d6bf2","Type":"ContainerStarted","Data":"d6d464e842d72aa92c3569828d3c6d1425b5073132344f84405a41f2616e8cd8"} Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.774393 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jffbq" event={"ID":"e5015508-305d-4f07-a137-85149d98f662","Type":"ContainerStarted","Data":"3cf16c4d3a7d668eb51730ee3fd1a3e97a125ab05a811b5b65e06d0c88acb2ce"} Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.823693 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2j26g" event={"ID":"e6214296-e09d-4c7a-a0ec-2d232793129f","Type":"ContainerStarted","Data":"b281a7e9f56f3253c2eaf1973115c28ec9a09af7202a550620dbe038d02bb95a"} Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.838420 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-logs\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.838481 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-horizon-secret-key\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.838563 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-scripts\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.838660 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g65r\" (UniqueName: \"kubernetes.io/projected/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-kube-api-access-7g65r\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.838692 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-config-data\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.839043 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-logs\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.845137 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-config-data\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.846046 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-87d8l" podStartSLOduration=3.846018055 podStartE2EDuration="3.846018055s" podCreationTimestamp="2025-12-06 08:29:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:29:48.836604161 +0000 UTC m=+1071.412309209" watchObservedRunningTime="2025-12-06 08:29:48.846018055 +0000 UTC m=+1071.421723093" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.846396 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" event={"ID":"9c55cdaa-1dd9-4c6e-937e-da63410a649d","Type":"ContainerStarted","Data":"2b408a8ddfd3ba6a8ef89d97985151dbe7c9f24a660f297029a3abfa27bc243c"} Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.846799 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-scripts\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.850545 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"d29048c3-9081-403e-80a2-bd13ee959417","Type":"ContainerStarted","Data":"9aa3f9622ffa62321e7fb8e4a5ac9127e46b7b9220081f91987bec1dab5739b9"} Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.852542 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerStarted","Data":"320085ec70a49cc4b2976aaa26e4764cf735058e7c0da9457716df82a268330f"} Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.853563 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b1add731-e870-4d5e-84fb-0c6f15a86916","Type":"ContainerStarted","Data":"e17485dc37f60f3a97a22052000a6fab5fdd42c4f719ca927f36666d5cc78ff2"} Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.854671 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-f8hjt" event={"ID":"27e8b45c-35a8-4407-849b-774bd681bf75","Type":"ContainerStarted","Data":"acee28c4d3bb112f7626aed3ffc9f824d024aa7e5021f2aeafbfda519a673d00"} Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.856289 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-horizon-secret-key\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.871355 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"036fdcea-9f9b-44fe-917e-4b8f8903fe48","Type":"ContainerStarted","Data":"f4c8ae4668845d5419a42d5a2fff1d8443ff256365fd628f048624caf2cccb66"} Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.877107 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-2j26g" 
podStartSLOduration=3.877085514 podStartE2EDuration="3.877085514s" podCreationTimestamp="2025-12-06 08:29:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:29:48.870679971 +0000 UTC m=+1071.446385019" watchObservedRunningTime="2025-12-06 08:29:48.877085514 +0000 UTC m=+1071.452790552" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.885807 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g65r\" (UniqueName: \"kubernetes.io/projected/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-kube-api-access-7g65r\") pod \"horizon-c56f8d849-s7cf9\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.889214 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-74dc665cdf-b5fpf" event={"ID":"8cb11399-f2d8-48d4-83a0-5569bacb0b10","Type":"ContainerStarted","Data":"e8937ff0c8bacfc281e15c4b1209bdf373106541f6997362c8ddc4d8355e64e7"} Dec 06 08:29:48 crc kubenswrapper[4763]: I1206 08:29:48.978212 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.350217 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.458622 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-svc\") pod \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.458706 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dldck\" (UniqueName: \"kubernetes.io/projected/4fc77560-e6e3-4cd9-bdd0-095aefb31625-kube-api-access-dldck\") pod \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.458732 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-sb\") pod \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.458851 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-config\") pod \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.458887 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-nb\") pod \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\" (UID: \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.458956 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-swift-storage-0\") pod \"4fc77560-e6e3-4cd9-bdd0-095aefb31625\" (UID: 
\"4fc77560-e6e3-4cd9-bdd0-095aefb31625\") " Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.509164 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fc77560-e6e3-4cd9-bdd0-095aefb31625-kube-api-access-dldck" (OuterVolumeSpecName: "kube-api-access-dldck") pod "4fc77560-e6e3-4cd9-bdd0-095aefb31625" (UID: "4fc77560-e6e3-4cd9-bdd0-095aefb31625"). InnerVolumeSpecName "kube-api-access-dldck". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.512949 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-config" (OuterVolumeSpecName: "config") pod "4fc77560-e6e3-4cd9-bdd0-095aefb31625" (UID: "4fc77560-e6e3-4cd9-bdd0-095aefb31625"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.519495 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4fc77560-e6e3-4cd9-bdd0-095aefb31625" (UID: "4fc77560-e6e3-4cd9-bdd0-095aefb31625"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.532806 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4fc77560-e6e3-4cd9-bdd0-095aefb31625" (UID: "4fc77560-e6e3-4cd9-bdd0-095aefb31625"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.545341 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4fc77560-e6e3-4cd9-bdd0-095aefb31625" (UID: "4fc77560-e6e3-4cd9-bdd0-095aefb31625"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.545635 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4fc77560-e6e3-4cd9-bdd0-095aefb31625" (UID: "4fc77560-e6e3-4cd9-bdd0-095aefb31625"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.561371 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.561401 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.561412 4763 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.561423 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.561433 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dldck\" (UniqueName: \"kubernetes.io/projected/4fc77560-e6e3-4cd9-bdd0-095aefb31625-kube-api-access-dldck\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.561441 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4fc77560-e6e3-4cd9-bdd0-095aefb31625-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.631383 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c56f8d849-s7cf9"] Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.910061 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c56f8d849-s7cf9" event={"ID":"8a0ede7a-32bc-45bc-94fe-973a86b2ef25","Type":"ContainerStarted","Data":"8ec5076cd8df04592d7f863e899f1040ecb95ce253ae96a19ae749df1c034b9c"} Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.913039 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" event={"ID":"4fc77560-e6e3-4cd9-bdd0-095aefb31625","Type":"ContainerDied","Data":"1885851b8ae3d27d6c1cbc3c2ca4e737bfdddceb73bc4657949f58063d3ad690"} Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.913073 4763 scope.go:117] "RemoveContainer" containerID="c60af3fea30551eefaa5559531865989ce57aac9e5c6895d5e214422d46b8e0c" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.913185 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68b58f878c-gnm7j" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.927870 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b1add731-e870-4d5e-84fb-0c6f15a86916","Type":"ContainerStarted","Data":"8b2b3854e392032986e988184bd241cdba66459b3b9fd71cd9889f3010c39b36"} Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.928104 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api-log" containerID="cri-o://e17485dc37f60f3a97a22052000a6fab5fdd42c4f719ca927f36666d5cc78ff2" gracePeriod=30 Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.928203 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api" containerID="cri-o://8b2b3854e392032986e988184bd241cdba66459b3b9fd71cd9889f3010c39b36" gracePeriod=30 Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.928442 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.931760 4763 generic.go:334] "Generic (PLEG): container finished" podID="9c55cdaa-1dd9-4c6e-937e-da63410a649d" containerID="73e5671153dd70acb463cbee67f5a4324905a3a1eaf9737c9478a39acf102aaa" exitCode=0 Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.931845 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" event={"ID":"9c55cdaa-1dd9-4c6e-937e-da63410a649d","Type":"ContainerDied","Data":"73e5671153dd70acb463cbee67f5a4324905a3a1eaf9737c9478a39acf102aaa"} Dec 06 08:29:49 crc kubenswrapper[4763]: I1206 08:29:49.941975 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.147:9322/\": EOF" Dec 06 08:29:50 crc kubenswrapper[4763]: I1206 08:29:50.048050 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68b58f878c-gnm7j"] Dec 06 08:29:50 crc kubenswrapper[4763]: I1206 08:29:50.058170 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68b58f878c-gnm7j"] Dec 06 08:29:50 crc kubenswrapper[4763]: I1206 08:29:50.067684 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=5.067662894 podStartE2EDuration="5.067662894s" podCreationTimestamp="2025-12-06 08:29:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:29:49.980712566 +0000 UTC m=+1072.556417604" watchObservedRunningTime="2025-12-06 08:29:50.067662894 +0000 UTC m=+1072.643367932" Dec 06 08:29:50 crc kubenswrapper[4763]: I1206 08:29:50.951668 4763 generic.go:334] "Generic (PLEG): container finished" podID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerID="e17485dc37f60f3a97a22052000a6fab5fdd42c4f719ca927f36666d5cc78ff2" exitCode=143 Dec 06 08:29:50 crc kubenswrapper[4763]: I1206 08:29:50.951736 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b1add731-e870-4d5e-84fb-0c6f15a86916","Type":"ContainerDied","Data":"e17485dc37f60f3a97a22052000a6fab5fdd42c4f719ca927f36666d5cc78ff2"} Dec 06 08:29:50 crc 
kubenswrapper[4763]: I1206 08:29:50.954730 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" event={"ID":"9c55cdaa-1dd9-4c6e-937e-da63410a649d","Type":"ContainerStarted","Data":"0b5fca88a783e5405c5fafef74d7d49576e5a06c19a0fad7fa4e76b59b8324ab"} Dec 06 08:29:50 crc kubenswrapper[4763]: I1206 08:29:50.955875 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:50 crc kubenswrapper[4763]: I1206 08:29:50.982963 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" podStartSLOduration=4.98294416 podStartE2EDuration="4.98294416s" podCreationTimestamp="2025-12-06 08:29:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:29:50.979955249 +0000 UTC m=+1073.555660297" watchObservedRunningTime="2025-12-06 08:29:50.98294416 +0000 UTC m=+1073.558649198" Dec 06 08:29:51 crc kubenswrapper[4763]: I1206 08:29:51.152406 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 06 08:29:51 crc kubenswrapper[4763]: I1206 08:29:51.736548 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fc77560-e6e3-4cd9-bdd0-095aefb31625" path="/var/lib/kubelet/pods/4fc77560-e6e3-4cd9-bdd0-095aefb31625/volumes" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.060788 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-74dc665cdf-b5fpf"] Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.096317 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6d9d9cc79d-g6nvn"] Dec 06 08:29:55 crc kubenswrapper[4763]: E1206 08:29:55.096833 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fc77560-e6e3-4cd9-bdd0-095aefb31625" containerName="init" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.096856 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fc77560-e6e3-4cd9-bdd0-095aefb31625" containerName="init" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.097109 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fc77560-e6e3-4cd9-bdd0-095aefb31625" containerName="init" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.098262 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.102839 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.103137 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.147:9322/\": read tcp 10.217.0.2:53586->10.217.0.147:9322: read: connection reset by peer" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.120979 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d9d9cc79d-g6nvn"] Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.136988 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-c56f8d849-s7cf9"] Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.174891 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5b557d69b-qxvcs"] Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.176416 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.201830 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5b557d69b-qxvcs"] Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.218414 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-combined-ca-bundle\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.218461 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wknps\" (UniqueName: \"kubernetes.io/projected/432569c2-b7db-4f70-80ba-80817d206847-kube-api-access-wknps\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.218507 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/432569c2-b7db-4f70-80ba-80817d206847-logs\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.218527 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-scripts\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.218575 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-tls-certs\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.218598 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-config-data\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.218623 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-secret-key\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.319860 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dee918b0-2519-402f-881e-052ffd7df1c0-horizon-secret-key\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.319919 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dee918b0-2519-402f-881e-052ffd7df1c0-combined-ca-bundle\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.319976 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g87dp\" (UniqueName: \"kubernetes.io/projected/dee918b0-2519-402f-881e-052ffd7df1c0-kube-api-access-g87dp\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320006 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/432569c2-b7db-4f70-80ba-80817d206847-logs\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320033 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-scripts\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320052 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/dee918b0-2519-402f-881e-052ffd7df1c0-horizon-tls-certs\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320118 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-tls-certs\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320150 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-config-data\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320185 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-secret-key\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320234 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dee918b0-2519-402f-881e-052ffd7df1c0-logs\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320293 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-combined-ca-bundle\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320316 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dee918b0-2519-402f-881e-052ffd7df1c0-config-data\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320349 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wknps\" (UniqueName: \"kubernetes.io/projected/432569c2-b7db-4f70-80ba-80817d206847-kube-api-access-wknps\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320375 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dee918b0-2519-402f-881e-052ffd7df1c0-scripts\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.320921 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/432569c2-b7db-4f70-80ba-80817d206847-logs\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.321423 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-scripts\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.322727 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-config-data\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: 
\"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.340785 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-combined-ca-bundle\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.342063 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-secret-key\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.346515 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-tls-certs\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.347668 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wknps\" (UniqueName: \"kubernetes.io/projected/432569c2-b7db-4f70-80ba-80817d206847-kube-api-access-wknps\") pod \"horizon-6d9d9cc79d-g6nvn\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.422388 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dee918b0-2519-402f-881e-052ffd7df1c0-scripts\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.422470 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dee918b0-2519-402f-881e-052ffd7df1c0-horizon-secret-key\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.422495 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dee918b0-2519-402f-881e-052ffd7df1c0-combined-ca-bundle\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.422554 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g87dp\" (UniqueName: \"kubernetes.io/projected/dee918b0-2519-402f-881e-052ffd7df1c0-kube-api-access-g87dp\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.422581 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/dee918b0-2519-402f-881e-052ffd7df1c0-horizon-tls-certs\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: 
I1206 08:29:55.422731 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dee918b0-2519-402f-881e-052ffd7df1c0-logs\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.422789 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dee918b0-2519-402f-881e-052ffd7df1c0-config-data\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.423335 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dee918b0-2519-402f-881e-052ffd7df1c0-scripts\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.423625 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dee918b0-2519-402f-881e-052ffd7df1c0-logs\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.424411 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dee918b0-2519-402f-881e-052ffd7df1c0-config-data\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.427721 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dee918b0-2519-402f-881e-052ffd7df1c0-horizon-secret-key\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.428229 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dee918b0-2519-402f-881e-052ffd7df1c0-combined-ca-bundle\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.428246 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/dee918b0-2519-402f-881e-052ffd7df1c0-horizon-tls-certs\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.442170 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g87dp\" (UniqueName: \"kubernetes.io/projected/dee918b0-2519-402f-881e-052ffd7df1c0-kube-api-access-g87dp\") pod \"horizon-5b557d69b-qxvcs\" (UID: \"dee918b0-2519-402f-881e-052ffd7df1c0\") " pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.493189 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:29:55 crc kubenswrapper[4763]: I1206 08:29:55.518947 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:29:56 crc kubenswrapper[4763]: I1206 08:29:56.028873 4763 generic.go:334] "Generic (PLEG): container finished" podID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerID="8b2b3854e392032986e988184bd241cdba66459b3b9fd71cd9889f3010c39b36" exitCode=0 Dec 06 08:29:56 crc kubenswrapper[4763]: I1206 08:29:56.028938 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b1add731-e870-4d5e-84fb-0c6f15a86916","Type":"ContainerDied","Data":"8b2b3854e392032986e988184bd241cdba66459b3b9fd71cd9889f3010c39b36"} Dec 06 08:29:56 crc kubenswrapper[4763]: I1206 08:29:56.153407 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.147:9322/\": dial tcp 10.217.0.147:9322: connect: connection refused" Dec 06 08:29:57 crc kubenswrapper[4763]: I1206 08:29:57.045431 4763 generic.go:334] "Generic (PLEG): container finished" podID="b88367cd-b824-4e91-9d5c-66b9e06d6bf2" containerID="d6d464e842d72aa92c3569828d3c6d1425b5073132344f84405a41f2616e8cd8" exitCode=0 Dec 06 08:29:57 crc kubenswrapper[4763]: I1206 08:29:57.045504 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-87d8l" event={"ID":"b88367cd-b824-4e91-9d5c-66b9e06d6bf2","Type":"ContainerDied","Data":"d6d464e842d72aa92c3569828d3c6d1425b5073132344f84405a41f2616e8cd8"} Dec 06 08:29:57 crc kubenswrapper[4763]: I1206 08:29:57.197713 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:29:57 crc kubenswrapper[4763]: I1206 08:29:57.257474 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5956b77d5c-kbjvz"] Dec 06 08:29:57 crc kubenswrapper[4763]: I1206 08:29:57.257699 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" podUID="72235379-a68d-41c8-9e7d-2880837b22d7" containerName="dnsmasq-dns" containerID="cri-o://bdd8adec75d19e417f66dadf92294dbbabd65123b7cfc021dab86b9a8e30a3f2" gracePeriod=10 Dec 06 08:29:58 crc kubenswrapper[4763]: I1206 08:29:58.056781 4763 generic.go:334] "Generic (PLEG): container finished" podID="72235379-a68d-41c8-9e7d-2880837b22d7" containerID="bdd8adec75d19e417f66dadf92294dbbabd65123b7cfc021dab86b9a8e30a3f2" exitCode=0 Dec 06 08:29:58 crc kubenswrapper[4763]: I1206 08:29:58.056866 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" event={"ID":"72235379-a68d-41c8-9e7d-2880837b22d7","Type":"ContainerDied","Data":"bdd8adec75d19e417f66dadf92294dbbabd65123b7cfc021dab86b9a8e30a3f2"} Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.134105 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256"] Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.136148 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.138687 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.138841 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.146659 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256"] Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.217516 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-secret-volume\") pod \"collect-profiles-29416830-sp256\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.217694 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-config-volume\") pod \"collect-profiles-29416830-sp256\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.217795 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqztv\" (UniqueName: \"kubernetes.io/projected/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-kube-api-access-dqztv\") pod \"collect-profiles-29416830-sp256\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.319945 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-secret-volume\") pod \"collect-profiles-29416830-sp256\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.320026 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-config-volume\") pod \"collect-profiles-29416830-sp256\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.320056 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqztv\" (UniqueName: \"kubernetes.io/projected/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-kube-api-access-dqztv\") pod \"collect-profiles-29416830-sp256\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.328024 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-config-volume\") pod 
\"collect-profiles-29416830-sp256\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.329086 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-secret-volume\") pod \"collect-profiles-29416830-sp256\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.357675 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqztv\" (UniqueName: \"kubernetes.io/projected/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-kube-api-access-dqztv\") pod \"collect-profiles-29416830-sp256\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:00 crc kubenswrapper[4763]: I1206 08:30:00.456273 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:01 crc kubenswrapper[4763]: I1206 08:30:01.153116 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.147:9322/\": dial tcp 10.217.0.147:9322: connect: connection refused" Dec 06 08:30:01 crc kubenswrapper[4763]: I1206 08:30:01.706477 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" podUID="72235379-a68d-41c8-9e7d-2880837b22d7" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: connect: connection refused" Dec 06 08:30:06 crc kubenswrapper[4763]: I1206 08:30:06.175611 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.147:9322/\": dial tcp 10.217.0.147:9322: connect: connection refused" Dec 06 08:30:06 crc kubenswrapper[4763]: E1206 08:30:06.519131 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest" Dec 06 08:30:06 crc kubenswrapper[4763]: E1206 08:30:06.519197 4763 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest" Dec 06 08:30:06 crc kubenswrapper[4763]: E1206 08:30:06.519353 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n595h68h5b9hc8h598h98h686hd5h5cfh68h696hb7h96h54fh579h54bh67hbch87h64bh79h5bfh64ch668h55dh7fh584h7dh5h668h545h5dq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:yes,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7q9q9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-74dc665cdf-b5fpf_openstack(8cb11399-f2d8-48d4-83a0-5569bacb0b10): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:30:06 crc kubenswrapper[4763]: E1206 08:30:06.522926 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest\\\"\"]" pod="openstack/horizon-74dc665cdf-b5fpf" podUID="8cb11399-f2d8-48d4-83a0-5569bacb0b10" Dec 06 08:30:06 crc kubenswrapper[4763]: I1206 08:30:06.706556 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" podUID="72235379-a68d-41c8-9e7d-2880837b22d7" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: connect: connection refused" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.237210 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-placement-api:watcher_latest" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.238205 4763 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-placement-api:watcher_latest" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.238359 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:placement-db-sync,Image:38.102.83.156:5001/podified-master-centos10/openstack-placement-api:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-68hnx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-f8hjt_openstack(27e8b45c-35a8-4407-849b-774bd681bf75): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.241184 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-f8hjt" podUID="27e8b45c-35a8-4407-849b-774bd681bf75" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.275798 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.275872 4763 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.275997 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n576h668h5cfh88h669h99h5cdh7bhf7h5cfhdfh666hd8hd8h566hbh569h64dhfdh67dh68dh56h5f9h58bh664h558h659h694hdfh8bh5f5h558q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:yes,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wrswr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-8476f8b67c-mpk68_openstack(c9213712-3273-4ab8-8810-f6f1d4ef8fbc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.279805 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest\\\"\"]" pod="openstack/horizon-8476f8b67c-mpk68" podUID="c9213712-3273-4ab8-8810-f6f1d4ef8fbc" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.297322 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.297387 4763 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.297506 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n558h58fh555h689h9fhb7h5cfh56bh55dh5dbh78hfh547h64ch5cdh549h69h669h54h76h59hcbh564h8h59ch697hfh7bh9bh588h7bh569q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:yes,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7g65r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-c56f8d849-s7cf9_openstack(8a0ede7a-32bc-45bc-94fe-973a86b2ef25): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:30:09 crc kubenswrapper[4763]: E1206 08:30:09.306250 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.156:5001/podified-master-centos10/openstack-horizon:watcher_latest\\\"\"]" pod="openstack/horizon-c56f8d849-s7cf9" podUID="8a0ede7a-32bc-45bc-94fe-973a86b2ef25" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.336972 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.389491 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-fernet-keys\") pod \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.389579 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-config-data\") pod \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.389621 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdprw\" (UniqueName: \"kubernetes.io/projected/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-kube-api-access-kdprw\") pod \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.389688 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-combined-ca-bundle\") pod \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.389784 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-scripts\") pod \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.389991 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-credential-keys\") pod \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\" (UID: \"b88367cd-b824-4e91-9d5c-66b9e06d6bf2\") " Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.398452 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b88367cd-b824-4e91-9d5c-66b9e06d6bf2" (UID: "b88367cd-b824-4e91-9d5c-66b9e06d6bf2"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.398712 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-kube-api-access-kdprw" (OuterVolumeSpecName: "kube-api-access-kdprw") pod "b88367cd-b824-4e91-9d5c-66b9e06d6bf2" (UID: "b88367cd-b824-4e91-9d5c-66b9e06d6bf2"). InnerVolumeSpecName "kube-api-access-kdprw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.398929 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-scripts" (OuterVolumeSpecName: "scripts") pod "b88367cd-b824-4e91-9d5c-66b9e06d6bf2" (UID: "b88367cd-b824-4e91-9d5c-66b9e06d6bf2"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.407553 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b88367cd-b824-4e91-9d5c-66b9e06d6bf2" (UID: "b88367cd-b824-4e91-9d5c-66b9e06d6bf2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.424976 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b88367cd-b824-4e91-9d5c-66b9e06d6bf2" (UID: "b88367cd-b824-4e91-9d5c-66b9e06d6bf2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.452235 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-config-data" (OuterVolumeSpecName: "config-data") pod "b88367cd-b824-4e91-9d5c-66b9e06d6bf2" (UID: "b88367cd-b824-4e91-9d5c-66b9e06d6bf2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.492854 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.492940 4763 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.492954 4763 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.492963 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.492975 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdprw\" (UniqueName: \"kubernetes.io/projected/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-kube-api-access-kdprw\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:09 crc kubenswrapper[4763]: I1206 08:30:09.492985 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b88367cd-b824-4e91-9d5c-66b9e06d6bf2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.166525 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-87d8l" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.166810 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-87d8l" event={"ID":"b88367cd-b824-4e91-9d5c-66b9e06d6bf2","Type":"ContainerDied","Data":"8625d0d2b13ea835dce03f9d6a0c4ad07371d5db1b8fa4fef8d60e8462edf6b1"} Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.166848 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8625d0d2b13ea835dce03f9d6a0c4ad07371d5db1b8fa4fef8d60e8462edf6b1" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.168594 4763 generic.go:334] "Generic (PLEG): container finished" podID="eeb85f24-e43f-4083-a8a2-1d0beebee795" containerID="94ab73bf2720f4f7927f40391b6d98aea89d4f03fe83e1c60d1ab87047875f54" exitCode=0 Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.168734 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vpjrt" event={"ID":"eeb85f24-e43f-4083-a8a2-1d0beebee795","Type":"ContainerDied","Data":"94ab73bf2720f4f7927f40391b6d98aea89d4f03fe83e1c60d1ab87047875f54"} Dec 06 08:30:10 crc kubenswrapper[4763]: E1206 08:30:10.170186 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.156:5001/podified-master-centos10/openstack-placement-api:watcher_latest\\\"\"" pod="openstack/placement-db-sync-f8hjt" podUID="27e8b45c-35a8-4407-849b-774bd681bf75" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.474452 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-87d8l"] Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.482844 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-87d8l"] Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.538563 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-54khp"] Dec 06 08:30:10 crc kubenswrapper[4763]: E1206 08:30:10.539005 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b88367cd-b824-4e91-9d5c-66b9e06d6bf2" containerName="keystone-bootstrap" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.539019 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="b88367cd-b824-4e91-9d5c-66b9e06d6bf2" containerName="keystone-bootstrap" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.550276 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="b88367cd-b824-4e91-9d5c-66b9e06d6bf2" containerName="keystone-bootstrap" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.550818 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-54khp"] Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.550982 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.553593 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.553661 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.553827 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gm8vr" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.553854 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.554071 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.621867 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-config-data\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.621956 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-credential-keys\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.621991 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-fernet-keys\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.622036 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4bhn\" (UniqueName: \"kubernetes.io/projected/03ab1923-fd90-45e6-9513-4ccc9b59667b-kube-api-access-f4bhn\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.622215 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-combined-ca-bundle\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.622249 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-scripts\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.723958 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-combined-ca-bundle\") pod 
\"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.724004 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-scripts\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.724048 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-config-data\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.724063 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-credential-keys\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.724079 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-fernet-keys\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.724129 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4bhn\" (UniqueName: \"kubernetes.io/projected/03ab1923-fd90-45e6-9513-4ccc9b59667b-kube-api-access-f4bhn\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.728961 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-credential-keys\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.731379 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-config-data\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.731380 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-fernet-keys\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.734368 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-scripts\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.734957 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-combined-ca-bundle\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.739997 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4bhn\" (UniqueName: \"kubernetes.io/projected/03ab1923-fd90-45e6-9513-4ccc9b59667b-kube-api-access-f4bhn\") pod \"keystone-bootstrap-54khp\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:10 crc kubenswrapper[4763]: I1206 08:30:10.883515 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:11 crc kubenswrapper[4763]: I1206 08:30:11.730935 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b88367cd-b824-4e91-9d5c-66b9e06d6bf2" path="/var/lib/kubelet/pods/b88367cd-b824-4e91-9d5c-66b9e06d6bf2/volumes" Dec 06 08:30:16 crc kubenswrapper[4763]: I1206 08:30:16.153800 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.147:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 06 08:30:16 crc kubenswrapper[4763]: I1206 08:30:16.706585 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" podUID="72235379-a68d-41c8-9e7d-2880837b22d7" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 06 08:30:16 crc kubenswrapper[4763]: I1206 08:30:16.706833 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:30:17 crc kubenswrapper[4763]: E1206 08:30:17.024803 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-barbican-api:watcher_latest" Dec 06 08:30:17 crc kubenswrapper[4763]: E1206 08:30:17.025162 4763 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-barbican-api:watcher_latest" Dec 06 08:30:17 crc kubenswrapper[4763]: E1206 08:30:17.025277 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:38.102.83.156:5001/podified-master-centos10/openstack-barbican-api:watcher_latest,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qz4jc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-jffbq_openstack(e5015508-305d-4f07-a137-85149d98f662): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:30:17 crc kubenswrapper[4763]: E1206 08:30:17.026512 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-jffbq" podUID="e5015508-305d-4f07-a137-85149d98f662" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.162612 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.169836 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.177958 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.203032 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.227434 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.237064 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-vpjrt" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.240164 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" event={"ID":"72235379-a68d-41c8-9e7d-2880837b22d7","Type":"ContainerDied","Data":"c7fd6ef7275ffc87dcfadda5c74773cb38860754436fb01b68fe77a9a255f2f5"} Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.240204 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.240214 4763 scope.go:117] "RemoveContainer" containerID="bdd8adec75d19e417f66dadf92294dbbabd65123b7cfc021dab86b9a8e30a3f2" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.247714 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-horizon-secret-key\") pod \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.247783 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-config-data\") pod \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.247990 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-sb\") pod \"72235379-a68d-41c8-9e7d-2880837b22d7\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248051 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-logs\") pod \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248362 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mslkv\" (UniqueName: \"kubernetes.io/projected/b1add731-e870-4d5e-84fb-0c6f15a86916-kube-api-access-mslkv\") pod \"b1add731-e870-4d5e-84fb-0c6f15a86916\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248406 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-config-data\") pod \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248430 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-scripts\") pod \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248451 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cb11399-f2d8-48d4-83a0-5569bacb0b10-logs\") pod \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " Dec 06 08:30:17 crc 
kubenswrapper[4763]: I1206 08:30:17.248495 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7q9q9\" (UniqueName: \"kubernetes.io/projected/8cb11399-f2d8-48d4-83a0-5569bacb0b10-kube-api-access-7q9q9\") pod \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248528 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-config\") pod \"72235379-a68d-41c8-9e7d-2880837b22d7\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248553 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-combined-ca-bundle\") pod \"b1add731-e870-4d5e-84fb-0c6f15a86916\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248590 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dwnx\" (UniqueName: \"kubernetes.io/projected/72235379-a68d-41c8-9e7d-2880837b22d7-kube-api-access-2dwnx\") pod \"72235379-a68d-41c8-9e7d-2880837b22d7\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248619 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-svc\") pod \"72235379-a68d-41c8-9e7d-2880837b22d7\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248643 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-swift-storage-0\") pod \"72235379-a68d-41c8-9e7d-2880837b22d7\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248671 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-config-data\") pod \"b1add731-e870-4d5e-84fb-0c6f15a86916\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248696 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrswr\" (UniqueName: \"kubernetes.io/projected/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-kube-api-access-wrswr\") pod \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248742 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-nb\") pod \"72235379-a68d-41c8-9e7d-2880837b22d7\" (UID: \"72235379-a68d-41c8-9e7d-2880837b22d7\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248767 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-scripts\") pod \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\" (UID: \"c9213712-3273-4ab8-8810-f6f1d4ef8fbc\") " Dec 06 08:30:17 crc 
kubenswrapper[4763]: I1206 08:30:17.248848 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cb11399-f2d8-48d4-83a0-5569bacb0b10-horizon-secret-key\") pod \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\" (UID: \"8cb11399-f2d8-48d4-83a0-5569bacb0b10\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.248876 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-custom-prometheus-ca\") pod \"b1add731-e870-4d5e-84fb-0c6f15a86916\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.249020 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1add731-e870-4d5e-84fb-0c6f15a86916-logs\") pod \"b1add731-e870-4d5e-84fb-0c6f15a86916\" (UID: \"b1add731-e870-4d5e-84fb-0c6f15a86916\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.249803 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-logs" (OuterVolumeSpecName: "logs") pod "c9213712-3273-4ab8-8810-f6f1d4ef8fbc" (UID: "c9213712-3273-4ab8-8810-f6f1d4ef8fbc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.250532 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-config-data" (OuterVolumeSpecName: "config-data") pod "c9213712-3273-4ab8-8810-f6f1d4ef8fbc" (UID: "c9213712-3273-4ab8-8810-f6f1d4ef8fbc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.251093 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-config-data" (OuterVolumeSpecName: "config-data") pod "8cb11399-f2d8-48d4-83a0-5569bacb0b10" (UID: "8cb11399-f2d8-48d4-83a0-5569bacb0b10"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.251177 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1add731-e870-4d5e-84fb-0c6f15a86916-logs" (OuterVolumeSpecName: "logs") pod "b1add731-e870-4d5e-84fb-0c6f15a86916" (UID: "b1add731-e870-4d5e-84fb-0c6f15a86916"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.252041 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-scripts" (OuterVolumeSpecName: "scripts") pod "8cb11399-f2d8-48d4-83a0-5569bacb0b10" (UID: "8cb11399-f2d8-48d4-83a0-5569bacb0b10"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.252604 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cb11399-f2d8-48d4-83a0-5569bacb0b10-logs" (OuterVolumeSpecName: "logs") pod "8cb11399-f2d8-48d4-83a0-5569bacb0b10" (UID: "8cb11399-f2d8-48d4-83a0-5569bacb0b10"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.257429 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c9213712-3273-4ab8-8810-f6f1d4ef8fbc" (UID: "c9213712-3273-4ab8-8810-f6f1d4ef8fbc"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.257882 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c56f8d849-s7cf9" event={"ID":"8a0ede7a-32bc-45bc-94fe-973a86b2ef25","Type":"ContainerDied","Data":"8ec5076cd8df04592d7f863e899f1040ecb95ce253ae96a19ae749df1c034b9c"} Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.258013 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c56f8d849-s7cf9" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.261287 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cb11399-f2d8-48d4-83a0-5569bacb0b10-kube-api-access-7q9q9" (OuterVolumeSpecName: "kube-api-access-7q9q9") pod "8cb11399-f2d8-48d4-83a0-5569bacb0b10" (UID: "8cb11399-f2d8-48d4-83a0-5569bacb0b10"). InnerVolumeSpecName "kube-api-access-7q9q9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.263407 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cb11399-f2d8-48d4-83a0-5569bacb0b10-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "8cb11399-f2d8-48d4-83a0-5569bacb0b10" (UID: "8cb11399-f2d8-48d4-83a0-5569bacb0b10"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.263856 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-scripts" (OuterVolumeSpecName: "scripts") pod "c9213712-3273-4ab8-8810-f6f1d4ef8fbc" (UID: "c9213712-3273-4ab8-8810-f6f1d4ef8fbc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.266487 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8476f8b67c-mpk68" event={"ID":"c9213712-3273-4ab8-8810-f6f1d4ef8fbc","Type":"ContainerDied","Data":"1ebcd0ed9280d953abce94e16520805d71f02cacf26d447513f4c64282c6adc6"} Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.266587 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8476f8b67c-mpk68" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.269231 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b1add731-e870-4d5e-84fb-0c6f15a86916","Type":"ContainerDied","Data":"9cb21d9d331b2f393bc1d94dde190d86b1ba653b07b6cc1a0c4856d709f440d5"} Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.269262 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.271415 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-vpjrt" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.271415 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-kube-api-access-wrswr" (OuterVolumeSpecName: "kube-api-access-wrswr") pod "c9213712-3273-4ab8-8810-f6f1d4ef8fbc" (UID: "c9213712-3273-4ab8-8810-f6f1d4ef8fbc"). InnerVolumeSpecName "kube-api-access-wrswr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.271416 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vpjrt" event={"ID":"eeb85f24-e43f-4083-a8a2-1d0beebee795","Type":"ContainerDied","Data":"b38c472fa9f7edb8b26bd96737c55b4d1660847813bfea2334cd10f9d076ce72"} Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.271585 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b38c472fa9f7edb8b26bd96737c55b4d1660847813bfea2334cd10f9d076ce72" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.272402 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74dc665cdf-b5fpf" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.272410 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-74dc665cdf-b5fpf" event={"ID":"8cb11399-f2d8-48d4-83a0-5569bacb0b10","Type":"ContainerDied","Data":"e8937ff0c8bacfc281e15c4b1209bdf373106541f6997362c8ddc4d8355e64e7"} Dec 06 08:30:17 crc kubenswrapper[4763]: E1206 08:30:17.277046 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.156:5001/podified-master-centos10/openstack-barbican-api:watcher_latest\\\"\"" pod="openstack/barbican-db-sync-jffbq" podUID="e5015508-305d-4f07-a137-85149d98f662" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.289367 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72235379-a68d-41c8-9e7d-2880837b22d7-kube-api-access-2dwnx" (OuterVolumeSpecName: "kube-api-access-2dwnx") pod "72235379-a68d-41c8-9e7d-2880837b22d7" (UID: "72235379-a68d-41c8-9e7d-2880837b22d7"). InnerVolumeSpecName "kube-api-access-2dwnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.316850 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1add731-e870-4d5e-84fb-0c6f15a86916-kube-api-access-mslkv" (OuterVolumeSpecName: "kube-api-access-mslkv") pod "b1add731-e870-4d5e-84fb-0c6f15a86916" (UID: "b1add731-e870-4d5e-84fb-0c6f15a86916"). InnerVolumeSpecName "kube-api-access-mslkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.331137 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1add731-e870-4d5e-84fb-0c6f15a86916" (UID: "b1add731-e870-4d5e-84fb-0c6f15a86916"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.334612 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "72235379-a68d-41c8-9e7d-2880837b22d7" (UID: "72235379-a68d-41c8-9e7d-2880837b22d7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.338134 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-config" (OuterVolumeSpecName: "config") pod "72235379-a68d-41c8-9e7d-2880837b22d7" (UID: "72235379-a68d-41c8-9e7d-2880837b22d7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.346440 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "b1add731-e870-4d5e-84fb-0c6f15a86916" (UID: "b1add731-e870-4d5e-84fb-0c6f15a86916"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.346910 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "72235379-a68d-41c8-9e7d-2880837b22d7" (UID: "72235379-a68d-41c8-9e7d-2880837b22d7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.351164 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-logs\") pod \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.351215 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-config-data\") pod \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.351502 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-logs" (OuterVolumeSpecName: "logs") pod "8a0ede7a-32bc-45bc-94fe-973a86b2ef25" (UID: "8a0ede7a-32bc-45bc-94fe-973a86b2ef25"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.351538 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjn24\" (UniqueName: \"kubernetes.io/projected/eeb85f24-e43f-4083-a8a2-1d0beebee795-kube-api-access-pjn24\") pod \"eeb85f24-e43f-4083-a8a2-1d0beebee795\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.351603 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-db-sync-config-data\") pod \"eeb85f24-e43f-4083-a8a2-1d0beebee795\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.351713 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-combined-ca-bundle\") pod \"eeb85f24-e43f-4083-a8a2-1d0beebee795\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.351731 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-horizon-secret-key\") pod \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.351759 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-scripts\") pod \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.351839 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-config-data\") pod \"eeb85f24-e43f-4083-a8a2-1d0beebee795\" (UID: \"eeb85f24-e43f-4083-a8a2-1d0beebee795\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.351934 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g65r\" (UniqueName: \"kubernetes.io/projected/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-kube-api-access-7g65r\") pod \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\" (UID: \"8a0ede7a-32bc-45bc-94fe-973a86b2ef25\") " Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352102 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-config-data" (OuterVolumeSpecName: "config-data") pod "8a0ede7a-32bc-45bc-94fe-973a86b2ef25" (UID: "8a0ede7a-32bc-45bc-94fe-973a86b2ef25"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352583 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dwnx\" (UniqueName: \"kubernetes.io/projected/72235379-a68d-41c8-9e7d-2880837b22d7-kube-api-access-2dwnx\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352607 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352623 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352635 4763 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352645 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352658 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrswr\" (UniqueName: \"kubernetes.io/projected/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-kube-api-access-wrswr\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352671 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352682 4763 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8cb11399-f2d8-48d4-83a0-5569bacb0b10-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352695 4763 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352705 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1add731-e870-4d5e-84fb-0c6f15a86916-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352694 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-scripts" (OuterVolumeSpecName: "scripts") pod "8a0ede7a-32bc-45bc-94fe-973a86b2ef25" (UID: "8a0ede7a-32bc-45bc-94fe-973a86b2ef25"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352716 4763 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352774 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352793 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9213712-3273-4ab8-8810-f6f1d4ef8fbc-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352827 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mslkv\" (UniqueName: \"kubernetes.io/projected/b1add731-e870-4d5e-84fb-0c6f15a86916-kube-api-access-mslkv\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352842 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352855 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cb11399-f2d8-48d4-83a0-5569bacb0b10-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352869 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cb11399-f2d8-48d4-83a0-5569bacb0b10-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352882 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7q9q9\" (UniqueName: \"kubernetes.io/projected/8cb11399-f2d8-48d4-83a0-5569bacb0b10-kube-api-access-7q9q9\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352913 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.352942 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.356602 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "8a0ede7a-32bc-45bc-94fe-973a86b2ef25" (UID: "8a0ede7a-32bc-45bc-94fe-973a86b2ef25"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.358756 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "eeb85f24-e43f-4083-a8a2-1d0beebee795" (UID: "eeb85f24-e43f-4083-a8a2-1d0beebee795"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.359309 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeb85f24-e43f-4083-a8a2-1d0beebee795-kube-api-access-pjn24" (OuterVolumeSpecName: "kube-api-access-pjn24") pod "eeb85f24-e43f-4083-a8a2-1d0beebee795" (UID: "eeb85f24-e43f-4083-a8a2-1d0beebee795"). InnerVolumeSpecName "kube-api-access-pjn24". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.359835 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-kube-api-access-7g65r" (OuterVolumeSpecName: "kube-api-access-7g65r") pod "8a0ede7a-32bc-45bc-94fe-973a86b2ef25" (UID: "8a0ede7a-32bc-45bc-94fe-973a86b2ef25"). InnerVolumeSpecName "kube-api-access-7g65r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.359947 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "72235379-a68d-41c8-9e7d-2880837b22d7" (UID: "72235379-a68d-41c8-9e7d-2880837b22d7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.365601 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "72235379-a68d-41c8-9e7d-2880837b22d7" (UID: "72235379-a68d-41c8-9e7d-2880837b22d7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.384574 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eeb85f24-e43f-4083-a8a2-1d0beebee795" (UID: "eeb85f24-e43f-4083-a8a2-1d0beebee795"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.392990 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-config-data" (OuterVolumeSpecName: "config-data") pod "b1add731-e870-4d5e-84fb-0c6f15a86916" (UID: "b1add731-e870-4d5e-84fb-0c6f15a86916"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.412345 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-config-data" (OuterVolumeSpecName: "config-data") pod "eeb85f24-e43f-4083-a8a2-1d0beebee795" (UID: "eeb85f24-e43f-4083-a8a2-1d0beebee795"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.454265 4763 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.454297 4763 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.454306 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.454315 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.454327 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.454336 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeb85f24-e43f-4083-a8a2-1d0beebee795-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.454345 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g65r\" (UniqueName: \"kubernetes.io/projected/8a0ede7a-32bc-45bc-94fe-973a86b2ef25-kube-api-access-7g65r\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.454354 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1add731-e870-4d5e-84fb-0c6f15a86916-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.454362 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72235379-a68d-41c8-9e7d-2880837b22d7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.454377 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjn24\" (UniqueName: \"kubernetes.io/projected/eeb85f24-e43f-4083-a8a2-1d0beebee795-kube-api-access-pjn24\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.524060 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-74dc665cdf-b5fpf"] Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.570969 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-74dc665cdf-b5fpf"] Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.619956 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5956b77d5c-kbjvz"] Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.632139 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5956b77d5c-kbjvz"] Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.741823 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="72235379-a68d-41c8-9e7d-2880837b22d7" path="/var/lib/kubelet/pods/72235379-a68d-41c8-9e7d-2880837b22d7/volumes" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.742875 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cb11399-f2d8-48d4-83a0-5569bacb0b10" path="/var/lib/kubelet/pods/8cb11399-f2d8-48d4-83a0-5569bacb0b10/volumes" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.758420 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-c56f8d849-s7cf9"] Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.771598 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-c56f8d849-s7cf9"] Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.784010 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.794935 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.805091 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:30:17 crc kubenswrapper[4763]: E1206 08:30:17.805556 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api-log" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.805576 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api-log" Dec 06 08:30:17 crc kubenswrapper[4763]: E1206 08:30:17.805596 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeb85f24-e43f-4083-a8a2-1d0beebee795" containerName="glance-db-sync" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.805605 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeb85f24-e43f-4083-a8a2-1d0beebee795" containerName="glance-db-sync" Dec 06 08:30:17 crc kubenswrapper[4763]: E1206 08:30:17.805615 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72235379-a68d-41c8-9e7d-2880837b22d7" containerName="init" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.805623 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="72235379-a68d-41c8-9e7d-2880837b22d7" containerName="init" Dec 06 08:30:17 crc kubenswrapper[4763]: E1206 08:30:17.805652 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72235379-a68d-41c8-9e7d-2880837b22d7" containerName="dnsmasq-dns" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.805660 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="72235379-a68d-41c8-9e7d-2880837b22d7" containerName="dnsmasq-dns" Dec 06 08:30:17 crc kubenswrapper[4763]: E1206 08:30:17.805680 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.805688 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.805944 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeb85f24-e43f-4083-a8a2-1d0beebee795" containerName="glance-db-sync" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.805960 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.805982 4763 
memory_manager.go:354] "RemoveStaleState removing state" podUID="72235379-a68d-41c8-9e7d-2880837b22d7" containerName="dnsmasq-dns" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.805996 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api-log" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.810228 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.813205 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.840073 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-8476f8b67c-mpk68"] Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.850298 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.859731 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-8476f8b67c-mpk68"] Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.905261 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-config-data\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.905369 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhcnp\" (UniqueName: \"kubernetes.io/projected/b70edee2-cbdf-4f64-8763-9405b3b6a93c-kube-api-access-mhcnp\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.905397 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b70edee2-cbdf-4f64-8763-9405b3b6a93c-logs\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.905447 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:17 crc kubenswrapper[4763]: I1206 08:30:17.905478 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.007150 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhcnp\" (UniqueName: \"kubernetes.io/projected/b70edee2-cbdf-4f64-8763-9405b3b6a93c-kube-api-access-mhcnp\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.007217 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/b70edee2-cbdf-4f64-8763-9405b3b6a93c-logs\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.007280 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.007326 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.007378 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-config-data\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.008442 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b70edee2-cbdf-4f64-8763-9405b3b6a93c-logs\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.011995 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-config-data\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.012661 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.012822 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.026845 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhcnp\" (UniqueName: \"kubernetes.io/projected/b70edee2-cbdf-4f64-8763-9405b3b6a93c-kube-api-access-mhcnp\") pod \"watcher-api-0\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.136037 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 06 08:30:18 crc kubenswrapper[4763]: E1206 08:30:18.626671 4763 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-cinder-api:watcher_latest" Dec 06 08:30:18 crc kubenswrapper[4763]: E1206 08:30:18.626723 4763 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.156:5001/podified-master-centos10/openstack-cinder-api:watcher_latest" Dec 06 08:30:18 crc kubenswrapper[4763]: E1206 08:30:18.626831 4763 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:38.102.83.156:5001/podified-master-centos10/openstack-cinder-api:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t54lp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-888r5_openstack(4f62869c-d491-4a12-a88c-1a58ef5b1bea): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 06 08:30:18 crc kubenswrapper[4763]: E1206 08:30:18.628156 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying 
config: context canceled\"" pod="openstack/cinder-db-sync-888r5" podUID="4f62869c-d491-4a12-a88c-1a58ef5b1bea" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.741246 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-755bf67bd9-mp98m"] Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.743086 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.744071 4763 scope.go:117] "RemoveContainer" containerID="da9f2648f4cda724f3e9b0b752366e4dff845834f47bf5faa5317fc7e9a5c674" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.777610 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-755bf67bd9-mp98m"] Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.867438 4763 scope.go:117] "RemoveContainer" containerID="8b2b3854e392032986e988184bd241cdba66459b3b9fd71cd9889f3010c39b36" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.927792 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxnxl\" (UniqueName: \"kubernetes.io/projected/ae997020-df09-4b74-8b6a-b0ad1adb24bf-kube-api-access-nxnxl\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.928133 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-nb\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.928240 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-config\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.928273 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-swift-storage-0\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.928320 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-svc\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.928356 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-sb\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:18 crc kubenswrapper[4763]: I1206 08:30:18.987305 4763 scope.go:117] "RemoveContainer" 
containerID="e17485dc37f60f3a97a22052000a6fab5fdd42c4f719ca927f36666d5cc78ff2" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.031226 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxnxl\" (UniqueName: \"kubernetes.io/projected/ae997020-df09-4b74-8b6a-b0ad1adb24bf-kube-api-access-nxnxl\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.031289 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-nb\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.031372 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-config\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.031426 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-swift-storage-0\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.031478 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-svc\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.031526 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-sb\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.032890 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-config\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.032931 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-sb\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.033610 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-swift-storage-0\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 
08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.033628 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-nb\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.033840 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-svc\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.053291 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxnxl\" (UniqueName: \"kubernetes.io/projected/ae997020-df09-4b74-8b6a-b0ad1adb24bf-kube-api-access-nxnxl\") pod \"dnsmasq-dns-755bf67bd9-mp98m\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.107593 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.298448 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256"] Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.335793 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d9d9cc79d-g6nvn"] Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.364656 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerStarted","Data":"d395750ae10f28014e714905ed31ba307adeacd069c0e51d12ae1353d8d31a2e"} Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.367854 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" event={"ID":"5701fead-fb3b-4eeb-a0a4-279b89a10ee9","Type":"ContainerStarted","Data":"be7bd59ca2b55d49f1d7c2e61fc17fcf39a573243ec129e3a9ac8d311d42c67d"} Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.386478 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=5.100284885 podStartE2EDuration="34.386461855s" podCreationTimestamp="2025-12-06 08:29:45 +0000 UTC" firstStartedPulling="2025-12-06 08:29:47.742312281 +0000 UTC m=+1070.318017319" lastFinishedPulling="2025-12-06 08:30:17.028489261 +0000 UTC m=+1099.604194289" observedRunningTime="2025-12-06 08:30:19.386357262 +0000 UTC m=+1101.962062310" watchObservedRunningTime="2025-12-06 08:30:19.386461855 +0000 UTC m=+1101.962166893" Dec 06 08:30:19 crc kubenswrapper[4763]: E1206 08:30:19.395532 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.156:5001/podified-master-centos10/openstack-cinder-api:watcher_latest\\\"\"" pod="openstack/cinder-db-sync-888r5" podUID="4f62869c-d491-4a12-a88c-1a58ef5b1bea" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.458140 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 
08:30:19.477100 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5b557d69b-qxvcs"] Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.581867 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-54khp"] Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.599807 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.601705 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.603942 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9rxtc" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.604218 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.604377 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.614067 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.729709 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a0ede7a-32bc-45bc-94fe-973a86b2ef25" path="/var/lib/kubelet/pods/8a0ede7a-32bc-45bc-94fe-973a86b2ef25/volumes" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.730235 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" path="/var/lib/kubelet/pods/b1add731-e870-4d5e-84fb-0c6f15a86916/volumes" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.730841 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9213712-3273-4ab8-8810-f6f1d4ef8fbc" path="/var/lib/kubelet/pods/c9213712-3273-4ab8-8810-f6f1d4ef8fbc/volumes" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.757486 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.757577 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8pwk\" (UniqueName: \"kubernetes.io/projected/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-kube-api-access-f8pwk\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.757696 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-config-data\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.757747 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.757770 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.757809 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.757839 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-logs\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.775184 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-755bf67bd9-mp98m"] Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.856355 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.859276 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.859835 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-logs\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.859984 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.860069 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8pwk\" (UniqueName: \"kubernetes.io/projected/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-kube-api-access-f8pwk\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.860222 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-config-data\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.860286 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-scripts\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.860316 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.860381 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.860566 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-logs\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.862354 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.863103 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.863136 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.870374 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.872224 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-scripts\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.872822 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.877581 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-config-data\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " 
pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.885552 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8pwk\" (UniqueName: \"kubernetes.io/projected/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-kube-api-access-f8pwk\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.907766 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.916915 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.962163 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.962201 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.962227 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csxhg\" (UniqueName: \"kubernetes.io/projected/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-kube-api-access-csxhg\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.962276 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.962293 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.962315 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:19 crc kubenswrapper[4763]: I1206 08:30:19.962352 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-logs\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.064208 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.064532 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.064560 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csxhg\" (UniqueName: \"kubernetes.io/projected/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-kube-api-access-csxhg\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.064611 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.064630 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.064658 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.064697 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-logs\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.064790 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.065323 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod 
\"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.065382 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-logs\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.069852 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.070723 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.081209 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.083741 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csxhg\" (UniqueName: \"kubernetes.io/projected/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-kube-api-access-csxhg\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.117779 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.192496 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.398181 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"036fdcea-9f9b-44fe-917e-4b8f8903fe48","Type":"ContainerStarted","Data":"0020985e174ca9d052a7b02fa6c2b67bd9b122c6106e8acf26a9cb35d7098a08"} Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.402609 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"d29048c3-9081-403e-80a2-bd13ee959417","Type":"ContainerStarted","Data":"e2800b21be9bdfdc65ecf4ecdfcf7d80a267e1bfd117fe3ed429ee9e7d382665"} Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.412064 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b557d69b-qxvcs" event={"ID":"dee918b0-2519-402f-881e-052ffd7df1c0","Type":"ContainerStarted","Data":"c78078673cdf60da9b5ef5d6174cb0c5b96d8e82cdf14f4c79889610c086fc8c"} Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.419998 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b70edee2-cbdf-4f64-8763-9405b3b6a93c","Type":"ContainerStarted","Data":"1ea499137985a49e7bec6f2582f0d3ef8e7c98c5ac9ce64769aa1e2c81807e1b"} Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.423336 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d9d9cc79d-g6nvn" event={"ID":"432569c2-b7db-4f70-80ba-80817d206847","Type":"ContainerStarted","Data":"236303f2ca93ae7eaf03dcbbd8096f81c63a53c8c9b24b2b7bf625cd7ba17d8d"} Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.426660 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" event={"ID":"5701fead-fb3b-4eeb-a0a4-279b89a10ee9","Type":"ContainerStarted","Data":"bed80c7b261a8b7fc449b3b2148f37db30f01b6b829b518f8d14445723481ede"} Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.429220 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" event={"ID":"ae997020-df09-4b74-8b6a-b0ad1adb24bf","Type":"ContainerStarted","Data":"53927adf808dd4fa831b5e002067b88928153d19ae741ca213708ff329b281d9"} Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.431942 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-54khp" event={"ID":"03ab1923-fd90-45e6-9513-4ccc9b59667b","Type":"ContainerStarted","Data":"71f52375f6822ccef041dc2ed2dbf31d74941583604b587fef6a09430a8ca9d8"} Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.441239 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=5.124485483 podStartE2EDuration="34.441213137s" podCreationTimestamp="2025-12-06 08:29:46 +0000 UTC" firstStartedPulling="2025-12-06 08:29:47.69558821 +0000 UTC m=+1070.271293248" lastFinishedPulling="2025-12-06 08:30:17.012315864 +0000 UTC m=+1099.588020902" observedRunningTime="2025-12-06 08:30:20.42321239 +0000 UTC m=+1102.998917448" watchObservedRunningTime="2025-12-06 08:30:20.441213137 +0000 UTC m=+1103.016918175" Dec 06 08:30:20 crc kubenswrapper[4763]: I1206 08:30:20.444663 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" podStartSLOduration=20.444648999 podStartE2EDuration="20.444648999s" podCreationTimestamp="2025-12-06 08:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:20.439157441 +0000 UTC m=+1103.014862489" watchObservedRunningTime="2025-12-06 08:30:20.444648999 +0000 UTC m=+1103.020354037" Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.159998 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b1add731-e870-4d5e-84fb-0c6f15a86916" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.147:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.326514 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.403156 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.444688 4763 generic.go:334] "Generic (PLEG): container finished" podID="5701fead-fb3b-4eeb-a0a4-279b89a10ee9" containerID="bed80c7b261a8b7fc449b3b2148f37db30f01b6b829b518f8d14445723481ede" exitCode=0 Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.444743 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" event={"ID":"5701fead-fb3b-4eeb-a0a4-279b89a10ee9","Type":"ContainerDied","Data":"bed80c7b261a8b7fc449b3b2148f37db30f01b6b829b518f8d14445723481ede"} Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.450375 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" event={"ID":"ae997020-df09-4b74-8b6a-b0ad1adb24bf","Type":"ContainerDied","Data":"b5a6aacbab3b83af818388523b6c1b7d518da23663fb1e5a0e4aebeb47d4d53a"} Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.451223 4763 generic.go:334] "Generic (PLEG): container finished" podID="ae997020-df09-4b74-8b6a-b0ad1adb24bf" containerID="b5a6aacbab3b83af818388523b6c1b7d518da23663fb1e5a0e4aebeb47d4d53a" exitCode=0 Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.453406 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-54khp" event={"ID":"03ab1923-fd90-45e6-9513-4ccc9b59667b","Type":"ContainerStarted","Data":"50ed0054acca71984db7fe90fdcc282ca7d62503ab260ed012e283197f39540c"} Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.456689 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b557d69b-qxvcs" event={"ID":"dee918b0-2519-402f-881e-052ffd7df1c0","Type":"ContainerStarted","Data":"b579e4c92a66e9fc6359c2762f1072fde97ac10c5362ee14e19a99f54219591d"} Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.458532 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b70edee2-cbdf-4f64-8763-9405b3b6a93c","Type":"ContainerStarted","Data":"b89b595dd34950f5244aeeadb7e44c3071b2bb5542e6d509898a7bc2201f3968"} Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.467932 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d9d9cc79d-g6nvn" event={"ID":"432569c2-b7db-4f70-80ba-80817d206847","Type":"ContainerStarted","Data":"faa41bc1a2797743bc528d0d97be37323bf39516cf02c75e83c9caa570dfa7a9"} Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.509743 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-54khp" podStartSLOduration=11.50972593 
podStartE2EDuration="11.50972593s" podCreationTimestamp="2025-12-06 08:30:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:21.504220351 +0000 UTC m=+1104.079925399" watchObservedRunningTime="2025-12-06 08:30:21.50972593 +0000 UTC m=+1104.085430968" Dec 06 08:30:21 crc kubenswrapper[4763]: W1206 08:30:21.668963 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14ccbedd_bb93_4c1d_82e0_243b3c6c4129.slice/crio-245364be967e5a0cd0a09ba402374f1ad6a308b329cbc88d33c463db5290f04b WatchSource:0}: Error finding container 245364be967e5a0cd0a09ba402374f1ad6a308b329cbc88d33c463db5290f04b: Status 404 returned error can't find the container with id 245364be967e5a0cd0a09ba402374f1ad6a308b329cbc88d33c463db5290f04b Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.706928 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5956b77d5c-kbjvz" podUID="72235379-a68d-41c8-9e7d-2880837b22d7" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.821161 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.876889 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:30:21 crc kubenswrapper[4763]: I1206 08:30:21.905559 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:30:22 crc kubenswrapper[4763]: I1206 08:30:22.491339 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0","Type":"ContainerStarted","Data":"a1cff9b12259d9a4151cff5b338ea4bad8a4e3a0a7a87a3b9d9317b984bd214a"} Dec 06 08:30:22 crc kubenswrapper[4763]: I1206 08:30:22.498787 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"14ccbedd-bb93-4c1d-82e0-243b3c6c4129","Type":"ContainerStarted","Data":"245364be967e5a0cd0a09ba402374f1ad6a308b329cbc88d33c463db5290f04b"} Dec 06 08:30:22 crc kubenswrapper[4763]: I1206 08:30:22.986796 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.171053 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-secret-volume\") pod \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.171131 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-config-volume\") pod \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.171325 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqztv\" (UniqueName: \"kubernetes.io/projected/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-kube-api-access-dqztv\") pod \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\" (UID: \"5701fead-fb3b-4eeb-a0a4-279b89a10ee9\") " Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.172558 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-config-volume" (OuterVolumeSpecName: "config-volume") pod "5701fead-fb3b-4eeb-a0a4-279b89a10ee9" (UID: "5701fead-fb3b-4eeb-a0a4-279b89a10ee9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.181054 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5701fead-fb3b-4eeb-a0a4-279b89a10ee9" (UID: "5701fead-fb3b-4eeb-a0a4-279b89a10ee9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.181606 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-kube-api-access-dqztv" (OuterVolumeSpecName: "kube-api-access-dqztv") pod "5701fead-fb3b-4eeb-a0a4-279b89a10ee9" (UID: "5701fead-fb3b-4eeb-a0a4-279b89a10ee9"). InnerVolumeSpecName "kube-api-access-dqztv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.273869 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqztv\" (UniqueName: \"kubernetes.io/projected/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-kube-api-access-dqztv\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.273927 4763 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.273937 4763 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5701fead-fb3b-4eeb-a0a4-279b89a10ee9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.508398 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b70edee2-cbdf-4f64-8763-9405b3b6a93c","Type":"ContainerStarted","Data":"4aee935eca6d9641966a2427189b3dbff9272b4ce34369a88fd2050d5c6b5fbb"} Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.510488 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" event={"ID":"5701fead-fb3b-4eeb-a0a4-279b89a10ee9","Type":"ContainerDied","Data":"be7bd59ca2b55d49f1d7c2e61fc17fcf39a573243ec129e3a9ac8d311d42c67d"} Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.510517 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be7bd59ca2b55d49f1d7c2e61fc17fcf39a573243ec129e3a9ac8d311d42c67d" Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.510576 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256" Dec 06 08:30:23 crc kubenswrapper[4763]: I1206 08:30:23.537137 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=6.53711475 podStartE2EDuration="6.53711475s" podCreationTimestamp="2025-12-06 08:30:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:23.529576527 +0000 UTC m=+1106.105281585" watchObservedRunningTime="2025-12-06 08:30:23.53711475 +0000 UTC m=+1106.112819788" Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.542640 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"036fdcea-9f9b-44fe-917e-4b8f8903fe48","Type":"ContainerStarted","Data":"c98b0237102d24fdeb6b9c08bbe03732b31010422b5cda02ae05f2c961e9de50"} Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.549585 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b557d69b-qxvcs" event={"ID":"dee918b0-2519-402f-881e-052ffd7df1c0","Type":"ContainerStarted","Data":"4b259cb0e0251e528c1a05ac46fdb7637c19412d301939f1a67f495715876f44"} Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.570716 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d9d9cc79d-g6nvn" event={"ID":"432569c2-b7db-4f70-80ba-80817d206847","Type":"ContainerStarted","Data":"172ad6cb4a74ed844abba93af0f99c6c1d603ab40f0ad451c1ac05210cb0c2b8"} Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.575946 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"14ccbedd-bb93-4c1d-82e0-243b3c6c4129","Type":"ContainerStarted","Data":"a97a1f31ab498dadd08a5629a0af61ee03a0c09180ed36925556155d9231e079"} Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.575996 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"14ccbedd-bb93-4c1d-82e0-243b3c6c4129","Type":"ContainerStarted","Data":"99a1bf8d4c3b079d0129a67d319ec3ec91c5fbd5dd9f993498a7de7a07a0d1a4"} Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.576121 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="14ccbedd-bb93-4c1d-82e0-243b3c6c4129" containerName="glance-log" containerID="cri-o://99a1bf8d4c3b079d0129a67d319ec3ec91c5fbd5dd9f993498a7de7a07a0d1a4" gracePeriod=30 Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.576151 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="14ccbedd-bb93-4c1d-82e0-243b3c6c4129" containerName="glance-httpd" containerID="cri-o://a97a1f31ab498dadd08a5629a0af61ee03a0c09180ed36925556155d9231e079" gracePeriod=30 Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.586639 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5b557d69b-qxvcs" podStartSLOduration=29.586597956 podStartE2EDuration="29.586597956s" podCreationTimestamp="2025-12-06 08:29:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:24.581452687 +0000 UTC m=+1107.157157725" watchObservedRunningTime="2025-12-06 08:30:24.586597956 +0000 UTC m=+1107.162302994" Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.597446 
4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0","Type":"ContainerStarted","Data":"185b575a4ab79ff821922f3328844f7ca1324b7eb2279be3b33c904f94baafc4"} Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.597531 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0","Type":"ContainerStarted","Data":"d8aa215219bd15073d066eb00e2c0e2d63e896cd100b0fc4d75fe64ff7a1cdf4"} Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.597855 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" containerName="glance-log" containerID="cri-o://d8aa215219bd15073d066eb00e2c0e2d63e896cd100b0fc4d75fe64ff7a1cdf4" gracePeriod=30 Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.598701 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" containerName="glance-httpd" containerID="cri-o://185b575a4ab79ff821922f3328844f7ca1324b7eb2279be3b33c904f94baafc4" gracePeriod=30 Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.608125 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6d9d9cc79d-g6nvn" podStartSLOduration=28.283959569 podStartE2EDuration="29.608107705s" podCreationTimestamp="2025-12-06 08:29:55 +0000 UTC" firstStartedPulling="2025-12-06 08:30:19.355375015 +0000 UTC m=+1101.931080053" lastFinishedPulling="2025-12-06 08:30:20.679523151 +0000 UTC m=+1103.255228189" observedRunningTime="2025-12-06 08:30:24.603397178 +0000 UTC m=+1107.179102216" watchObservedRunningTime="2025-12-06 08:30:24.608107705 +0000 UTC m=+1107.183812743" Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.611671 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" event={"ID":"ae997020-df09-4b74-8b6a-b0ad1adb24bf","Type":"ContainerStarted","Data":"808083b0668ec999512b26193dfa4eb1d50771db9b56a1800259b2f8a88701c1"} Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.612484 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.618590 4763 generic.go:334] "Generic (PLEG): container finished" podID="e6214296-e09d-4c7a-a0ec-2d232793129f" containerID="b281a7e9f56f3253c2eaf1973115c28ec9a09af7202a550620dbe038d02bb95a" exitCode=0 Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.619019 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2j26g" event={"ID":"e6214296-e09d-4c7a-a0ec-2d232793129f","Type":"ContainerDied","Data":"b281a7e9f56f3253c2eaf1973115c28ec9a09af7202a550620dbe038d02bb95a"} Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.619588 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.651880 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" podStartSLOduration=6.651864794 podStartE2EDuration="6.651864794s" podCreationTimestamp="2025-12-06 08:30:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-06 08:30:24.648944625 +0000 UTC m=+1107.224649673" watchObservedRunningTime="2025-12-06 08:30:24.651864794 +0000 UTC m=+1107.227569832" Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.655241 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.655227535 podStartE2EDuration="6.655227535s" podCreationTimestamp="2025-12-06 08:30:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:24.62723162 +0000 UTC m=+1107.202936658" watchObservedRunningTime="2025-12-06 08:30:24.655227535 +0000 UTC m=+1107.230932573" Dec 06 08:30:24 crc kubenswrapper[4763]: I1206 08:30:24.692019 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.691999975 podStartE2EDuration="6.691999975s" podCreationTimestamp="2025-12-06 08:30:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:24.669985872 +0000 UTC m=+1107.245690910" watchObservedRunningTime="2025-12-06 08:30:24.691999975 +0000 UTC m=+1107.267705013" Dec 06 08:30:25 crc kubenswrapper[4763]: E1206 08:30:25.311705 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14ccbedd_bb93_4c1d_82e0_243b3c6c4129.slice/crio-conmon-a97a1f31ab498dadd08a5629a0af61ee03a0c09180ed36925556155d9231e079.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca6b7266_95e1_49c4_88ed_0bf99ed0adf0.slice/crio-185b575a4ab79ff821922f3328844f7ca1324b7eb2279be3b33c904f94baafc4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14ccbedd_bb93_4c1d_82e0_243b3c6c4129.slice/crio-a97a1f31ab498dadd08a5629a0af61ee03a0c09180ed36925556155d9231e079.scope\": RecentStats: unable to find data in memory cache]" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.493414 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.493486 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.519489 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.520008 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.642716 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-f8hjt" event={"ID":"27e8b45c-35a8-4407-849b-774bd681bf75","Type":"ContainerStarted","Data":"d5a713202af766136564c2d9068a0b2056e360a876a235daf463af5a38943249"} Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.650867 4763 generic.go:334] "Generic (PLEG): container finished" podID="7731d4cb-7569-4783-842d-acef9e33cb50" containerID="d395750ae10f28014e714905ed31ba307adeacd069c0e51d12ae1353d8d31a2e" exitCode=1 Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.650973 4763 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerDied","Data":"d395750ae10f28014e714905ed31ba307adeacd069c0e51d12ae1353d8d31a2e"} Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.651984 4763 scope.go:117] "RemoveContainer" containerID="d395750ae10f28014e714905ed31ba307adeacd069c0e51d12ae1353d8d31a2e" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.659343 4763 generic.go:334] "Generic (PLEG): container finished" podID="14ccbedd-bb93-4c1d-82e0-243b3c6c4129" containerID="a97a1f31ab498dadd08a5629a0af61ee03a0c09180ed36925556155d9231e079" exitCode=0 Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.659388 4763 generic.go:334] "Generic (PLEG): container finished" podID="14ccbedd-bb93-4c1d-82e0-243b3c6c4129" containerID="99a1bf8d4c3b079d0129a67d319ec3ec91c5fbd5dd9f993498a7de7a07a0d1a4" exitCode=143 Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.659430 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"14ccbedd-bb93-4c1d-82e0-243b3c6c4129","Type":"ContainerDied","Data":"a97a1f31ab498dadd08a5629a0af61ee03a0c09180ed36925556155d9231e079"} Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.659473 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"14ccbedd-bb93-4c1d-82e0-243b3c6c4129","Type":"ContainerDied","Data":"99a1bf8d4c3b079d0129a67d319ec3ec91c5fbd5dd9f993498a7de7a07a0d1a4"} Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.659484 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"14ccbedd-bb93-4c1d-82e0-243b3c6c4129","Type":"ContainerDied","Data":"245364be967e5a0cd0a09ba402374f1ad6a308b329cbc88d33c463db5290f04b"} Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.659493 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="245364be967e5a0cd0a09ba402374f1ad6a308b329cbc88d33c463db5290f04b" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.665028 4763 generic.go:334] "Generic (PLEG): container finished" podID="ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" containerID="185b575a4ab79ff821922f3328844f7ca1324b7eb2279be3b33c904f94baafc4" exitCode=0 Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.665063 4763 generic.go:334] "Generic (PLEG): container finished" podID="ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" containerID="d8aa215219bd15073d066eb00e2c0e2d63e896cd100b0fc4d75fe64ff7a1cdf4" exitCode=143 Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.666381 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0","Type":"ContainerDied","Data":"185b575a4ab79ff821922f3328844f7ca1324b7eb2279be3b33c904f94baafc4"} Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.666411 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0","Type":"ContainerDied","Data":"d8aa215219bd15073d066eb00e2c0e2d63e896cd100b0fc4d75fe64ff7a1cdf4"} Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.681388 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-f8hjt" podStartSLOduration=2.62039348 podStartE2EDuration="39.681359861s" podCreationTimestamp="2025-12-06 08:29:46 +0000 UTC" 
firstStartedPulling="2025-12-06 08:29:47.744492161 +0000 UTC m=+1070.320197199" lastFinishedPulling="2025-12-06 08:30:24.805458542 +0000 UTC m=+1107.381163580" observedRunningTime="2025-12-06 08:30:25.661974629 +0000 UTC m=+1108.237679667" watchObservedRunningTime="2025-12-06 08:30:25.681359861 +0000 UTC m=+1108.257064899" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.703370 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.721514 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.833638 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-httpd-run\") pod \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.833677 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-scripts\") pod \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.833711 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csxhg\" (UniqueName: \"kubernetes.io/projected/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-kube-api-access-csxhg\") pod \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.833746 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-logs\") pod \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.833804 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-combined-ca-bundle\") pod \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.833838 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-logs\") pod \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.833857 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.833871 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-httpd-run\") pod \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.833937 4763 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.833959 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8pwk\" (UniqueName: \"kubernetes.io/projected/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-kube-api-access-f8pwk\") pod \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.833986 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-scripts\") pod \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.834031 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-combined-ca-bundle\") pod \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.834074 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-config-data\") pod \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\" (UID: \"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.834091 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-config-data\") pod \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\" (UID: \"14ccbedd-bb93-4c1d-82e0-243b3c6c4129\") " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.835827 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "14ccbedd-bb93-4c1d-82e0-243b3c6c4129" (UID: "14ccbedd-bb93-4c1d-82e0-243b3c6c4129"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.837075 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" (UID: "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.837260 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-logs" (OuterVolumeSpecName: "logs") pod "14ccbedd-bb93-4c1d-82e0-243b3c6c4129" (UID: "14ccbedd-bb93-4c1d-82e0-243b3c6c4129"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.840110 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-logs" (OuterVolumeSpecName: "logs") pod "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" (UID: "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.842659 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-scripts" (OuterVolumeSpecName: "scripts") pod "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" (UID: "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.842807 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "14ccbedd-bb93-4c1d-82e0-243b3c6c4129" (UID: "14ccbedd-bb93-4c1d-82e0-243b3c6c4129"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.845992 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" (UID: "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.850103 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-scripts" (OuterVolumeSpecName: "scripts") pod "14ccbedd-bb93-4c1d-82e0-243b3c6c4129" (UID: "14ccbedd-bb93-4c1d-82e0-243b3c6c4129"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.853298 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-kube-api-access-f8pwk" (OuterVolumeSpecName: "kube-api-access-f8pwk") pod "14ccbedd-bb93-4c1d-82e0-243b3c6c4129" (UID: "14ccbedd-bb93-4c1d-82e0-243b3c6c4129"). InnerVolumeSpecName "kube-api-access-f8pwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.863128 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-kube-api-access-csxhg" (OuterVolumeSpecName: "kube-api-access-csxhg") pod "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" (UID: "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0"). InnerVolumeSpecName "kube-api-access-csxhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.878242 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" (UID: "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.886197 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14ccbedd-bb93-4c1d-82e0-243b3c6c4129" (UID: "14ccbedd-bb93-4c1d-82e0-243b3c6c4129"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.936651 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.936691 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.936732 4763 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.939654 4763 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.939694 4763 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.939705 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8pwk\" (UniqueName: \"kubernetes.io/projected/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-kube-api-access-f8pwk\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.939717 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.939727 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.939736 4763 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.939746 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.939804 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csxhg\" (UniqueName: \"kubernetes.io/projected/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-kube-api-access-csxhg\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.939814 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.955068 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-config-data" (OuterVolumeSpecName: "config-data") pod "14ccbedd-bb93-4c1d-82e0-243b3c6c4129" (UID: "14ccbedd-bb93-4c1d-82e0-243b3c6c4129"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.978869 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-config-data" (OuterVolumeSpecName: "config-data") pod "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" (UID: "ca6b7266-95e1-49c4-88ed-0bf99ed0adf0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.979095 4763 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 06 08:30:25 crc kubenswrapper[4763]: I1206 08:30:25.980063 4763 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.037679 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2j26g" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.041731 4763 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.041758 4763 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.041768 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.041778 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ccbedd-bb93-4c1d-82e0-243b3c6c4129-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.143109 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-combined-ca-bundle\") pod \"e6214296-e09d-4c7a-a0ec-2d232793129f\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.143205 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdzmq\" (UniqueName: \"kubernetes.io/projected/e6214296-e09d-4c7a-a0ec-2d232793129f-kube-api-access-rdzmq\") pod \"e6214296-e09d-4c7a-a0ec-2d232793129f\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.143298 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-config\") pod \"e6214296-e09d-4c7a-a0ec-2d232793129f\" (UID: \"e6214296-e09d-4c7a-a0ec-2d232793129f\") " Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.147588 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6214296-e09d-4c7a-a0ec-2d232793129f-kube-api-access-rdzmq" (OuterVolumeSpecName: "kube-api-access-rdzmq") pod "e6214296-e09d-4c7a-a0ec-2d232793129f" (UID: "e6214296-e09d-4c7a-a0ec-2d232793129f"). InnerVolumeSpecName "kube-api-access-rdzmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.177084 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-config" (OuterVolumeSpecName: "config") pod "e6214296-e09d-4c7a-a0ec-2d232793129f" (UID: "e6214296-e09d-4c7a-a0ec-2d232793129f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.198864 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6214296-e09d-4c7a-a0ec-2d232793129f" (UID: "e6214296-e09d-4c7a-a0ec-2d232793129f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.246976 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.247004 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdzmq\" (UniqueName: \"kubernetes.io/projected/e6214296-e09d-4c7a-a0ec-2d232793129f-kube-api-access-rdzmq\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.247016 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e6214296-e09d-4c7a-a0ec-2d232793129f-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.528555 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.528633 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.677484 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ca6b7266-95e1-49c4-88ed-0bf99ed0adf0","Type":"ContainerDied","Data":"a1cff9b12259d9a4151cff5b338ea4bad8a4e3a0a7a87a3b9d9317b984bd214a"} Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.677539 4763 scope.go:117] "RemoveContainer" containerID="185b575a4ab79ff821922f3328844f7ca1324b7eb2279be3b33c904f94baafc4" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.677675 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.684286 4763 generic.go:334] "Generic (PLEG): container finished" podID="03ab1923-fd90-45e6-9513-4ccc9b59667b" containerID="50ed0054acca71984db7fe90fdcc282ca7d62503ab260ed012e283197f39540c" exitCode=0 Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.684355 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-54khp" event={"ID":"03ab1923-fd90-45e6-9513-4ccc9b59667b","Type":"ContainerDied","Data":"50ed0054acca71984db7fe90fdcc282ca7d62503ab260ed012e283197f39540c"} Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.690031 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2j26g" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.690049 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2j26g" event={"ID":"e6214296-e09d-4c7a-a0ec-2d232793129f","Type":"ContainerDied","Data":"461aaaa9644b7c60324c5d7521104f94601037e78ebe0fae901fdaabba0ee424"} Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.690177 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="461aaaa9644b7c60324c5d7521104f94601037e78ebe0fae901fdaabba0ee424" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.694144 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerStarted","Data":"d5aad9eb2018ea0e78fd14c0a47cabca0b684b24712fb3bcf03ee1edd83fd9d5"} Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.695127 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.831342 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.858664 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.871982 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.880956 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.896950 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908050 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:30:26 crc kubenswrapper[4763]: E1206 08:30:26.908492 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" containerName="glance-log" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908510 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" containerName="glance-log" Dec 06 08:30:26 crc kubenswrapper[4763]: E1206 08:30:26.908532 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5701fead-fb3b-4eeb-a0a4-279b89a10ee9" containerName="collect-profiles" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908537 4763 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="5701fead-fb3b-4eeb-a0a4-279b89a10ee9" containerName="collect-profiles" Dec 06 08:30:26 crc kubenswrapper[4763]: E1206 08:30:26.908552 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14ccbedd-bb93-4c1d-82e0-243b3c6c4129" containerName="glance-httpd" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908558 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="14ccbedd-bb93-4c1d-82e0-243b3c6c4129" containerName="glance-httpd" Dec 06 08:30:26 crc kubenswrapper[4763]: E1206 08:30:26.908567 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" containerName="glance-httpd" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908573 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" containerName="glance-httpd" Dec 06 08:30:26 crc kubenswrapper[4763]: E1206 08:30:26.908581 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6214296-e09d-4c7a-a0ec-2d232793129f" containerName="neutron-db-sync" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908587 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6214296-e09d-4c7a-a0ec-2d232793129f" containerName="neutron-db-sync" Dec 06 08:30:26 crc kubenswrapper[4763]: E1206 08:30:26.908600 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14ccbedd-bb93-4c1d-82e0-243b3c6c4129" containerName="glance-log" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908605 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="14ccbedd-bb93-4c1d-82e0-243b3c6c4129" containerName="glance-log" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908788 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="14ccbedd-bb93-4c1d-82e0-243b3c6c4129" containerName="glance-log" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908811 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6214296-e09d-4c7a-a0ec-2d232793129f" containerName="neutron-db-sync" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908822 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" containerName="glance-log" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908831 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" containerName="glance-httpd" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908844 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="14ccbedd-bb93-4c1d-82e0-243b3c6c4129" containerName="glance-httpd" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.908857 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="5701fead-fb3b-4eeb-a0a4-279b89a10ee9" containerName="collect-profiles" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.909874 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.913695 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.913762 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.913921 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9rxtc" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.913940 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.914022 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.942712 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.944351 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.949475 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.949715 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 06 08:30:26 crc kubenswrapper[4763]: I1206 08:30:26.964610 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.046079 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.082954 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083008 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083056 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-logs\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083081 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 
08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083122 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-logs\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083152 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083189 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvbtw\" (UniqueName: \"kubernetes.io/projected/4c67602c-9d2e-44a3-a187-f51e90798ba3-kube-api-access-mvbtw\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083226 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083280 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-config-data\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083317 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083346 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083376 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083402 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2584n\" (UniqueName: \"kubernetes.io/projected/8b17ba6f-6373-4c93-b07d-73a464deec1a-kube-api-access-2584n\") pod \"glance-default-internal-api-0\" (UID: 
\"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083440 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083469 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-scripts\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.083491 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.111629 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-755bf67bd9-mp98m"] Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.152600 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c698b9485-rqx2w"] Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.154236 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.165739 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c698b9485-rqx2w"] Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.184931 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.184972 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-scripts\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.184993 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185024 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc 
kubenswrapper[4763]: I1206 08:30:27.185044 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185074 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-logs\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185091 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185118 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-logs\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185138 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185162 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvbtw\" (UniqueName: \"kubernetes.io/projected/4c67602c-9d2e-44a3-a187-f51e90798ba3-kube-api-access-mvbtw\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185186 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185222 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-config-data\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185246 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185266 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185284 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185302 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2584n\" (UniqueName: \"kubernetes.io/projected/8b17ba6f-6373-4c93-b07d-73a464deec1a-kube-api-access-2584n\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185872 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.185889 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-logs\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.186024 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.186593 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-logs\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.186615 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.186723 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.198136 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-config-data\") pod 
\"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.199388 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.202023 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.205651 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.205943 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.207554 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.207935 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2584n\" (UniqueName: \"kubernetes.io/projected/8b17ba6f-6373-4c93-b07d-73a464deec1a-kube-api-access-2584n\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.235421 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvbtw\" (UniqueName: \"kubernetes.io/projected/4c67602c-9d2e-44a3-a187-f51e90798ba3-kube-api-access-mvbtw\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.238556 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.249880 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " 
pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.263866 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-scripts\") pod \"glance-default-external-api-0\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.285043 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.287384 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-swift-storage-0\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.287464 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-nb\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.287608 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4k62\" (UniqueName: \"kubernetes.io/projected/f2782736-123f-4570-a129-af3317738af3-kube-api-access-z4k62\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.287745 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-svc\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.287913 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-config\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.287999 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-sb\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.292710 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-87d7c84fb-vhh22"] Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.294263 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.302382 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.302710 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.302880 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.314371 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-mfl2g" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.333697 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-87d7c84fb-vhh22"] Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.389817 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-svc\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.390200 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-config\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.390270 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-config\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.390322 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-sb\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.390351 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-combined-ca-bundle\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.390404 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvs7v\" (UniqueName: \"kubernetes.io/projected/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-kube-api-access-gvs7v\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.390425 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-swift-storage-0\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: 
\"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.390445 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-nb\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.390467 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-ovndb-tls-certs\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.390506 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-httpd-config\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.390526 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4k62\" (UniqueName: \"kubernetes.io/projected/f2782736-123f-4570-a129-af3317738af3-kube-api-access-z4k62\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.391784 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-config\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.392492 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-swift-storage-0\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.393072 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-svc\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.393114 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-sb\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.393479 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-nb\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" 
Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.431691 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4k62\" (UniqueName: \"kubernetes.io/projected/f2782736-123f-4570-a129-af3317738af3-kube-api-access-z4k62\") pod \"dnsmasq-dns-7c698b9485-rqx2w\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.476345 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.492242 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-httpd-config\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.492349 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-config\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.492425 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-combined-ca-bundle\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.492511 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvs7v\" (UniqueName: \"kubernetes.io/projected/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-kube-api-access-gvs7v\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.492559 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-ovndb-tls-certs\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.496762 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-httpd-config\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.507594 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-ovndb-tls-certs\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.509712 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-config\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" 
Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.522849 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvs7v\" (UniqueName: \"kubernetes.io/projected/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-kube-api-access-gvs7v\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.526491 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-combined-ca-bundle\") pod \"neutron-87d7c84fb-vhh22\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.574139 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.589382 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.654854 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.702161 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" podUID="ae997020-df09-4b74-8b6a-b0ad1adb24bf" containerName="dnsmasq-dns" containerID="cri-o://808083b0668ec999512b26193dfa4eb1d50771db9b56a1800259b2f8a88701c1" gracePeriod=10 Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.746590 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14ccbedd-bb93-4c1d-82e0-243b3c6c4129" path="/var/lib/kubelet/pods/14ccbedd-bb93-4c1d-82e0-243b3c6c4129/volumes" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.747627 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca6b7266-95e1-49c4-88ed-0bf99ed0adf0" path="/var/lib/kubelet/pods/ca6b7266-95e1-49c4-88ed-0bf99ed0adf0/volumes" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.768123 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.819416 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-applier-0"] Dec 06 08:30:27 crc kubenswrapper[4763]: I1206 08:30:27.878984 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 06 08:30:28 crc kubenswrapper[4763]: I1206 08:30:28.137567 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Dec 06 08:30:28 crc kubenswrapper[4763]: I1206 08:30:28.137893 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 06 08:30:28 crc kubenswrapper[4763]: I1206 08:30:28.144068 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Dec 06 08:30:28 crc kubenswrapper[4763]: I1206 08:30:28.714312 4763 generic.go:334] "Generic (PLEG): container finished" podID="ae997020-df09-4b74-8b6a-b0ad1adb24bf" containerID="808083b0668ec999512b26193dfa4eb1d50771db9b56a1800259b2f8a88701c1" exitCode=0 Dec 06 08:30:28 crc kubenswrapper[4763]: I1206 08:30:28.715228 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" event={"ID":"ae997020-df09-4b74-8b6a-b0ad1adb24bf","Type":"ContainerDied","Data":"808083b0668ec999512b26193dfa4eb1d50771db9b56a1800259b2f8a88701c1"} Dec 06 08:30:28 crc kubenswrapper[4763]: I1206 08:30:28.719982 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.119307 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" podUID="ae997020-df09-4b74-8b6a-b0ad1adb24bf" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.163:5353: connect: connection refused" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.709947 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-798696db5c-57lrg"] Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.711518 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.715240 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.715469 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.730388 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-applier-0" podUID="d29048c3-9081-403e-80a2-bd13ee959417" containerName="watcher-applier" containerID="cri-o://e2800b21be9bdfdc65ecf4ecdfcf7d80a267e1bfd117fe3ed429ee9e7d382665" gracePeriod=30 Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.736374 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-798696db5c-57lrg"] Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.873102 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-config\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.873688 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-ovndb-tls-certs\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.873732 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-public-tls-certs\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.873876 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cg2v\" (UniqueName: \"kubernetes.io/projected/a6cebba0-2a89-4d1d-b35c-811676cd4459-kube-api-access-6cg2v\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.873930 4763 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-combined-ca-bundle\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.873957 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-internal-tls-certs\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.874059 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-httpd-config\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.976139 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-internal-tls-certs\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.976225 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-httpd-config\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.976287 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-config\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.976346 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-ovndb-tls-certs\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.976393 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-public-tls-certs\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.976483 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cg2v\" (UniqueName: \"kubernetes.io/projected/a6cebba0-2a89-4d1d-b35c-811676cd4459-kube-api-access-6cg2v\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.976521 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-combined-ca-bundle\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.983777 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-httpd-config\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.984025 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-ovndb-tls-certs\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.984482 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-combined-ca-bundle\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.984756 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-config\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.986108 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-internal-tls-certs\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.995640 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6cebba0-2a89-4d1d-b35c-811676cd4459-public-tls-certs\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:29 crc kubenswrapper[4763]: I1206 08:30:29.999156 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cg2v\" (UniqueName: \"kubernetes.io/projected/a6cebba0-2a89-4d1d-b35c-811676cd4459-kube-api-access-6cg2v\") pod \"neutron-798696db5c-57lrg\" (UID: \"a6cebba0-2a89-4d1d-b35c-811676cd4459\") " pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:30 crc kubenswrapper[4763]: I1206 08:30:30.039021 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.494653 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.608426 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-credential-keys\") pod \"03ab1923-fd90-45e6-9513-4ccc9b59667b\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.608508 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4bhn\" (UniqueName: \"kubernetes.io/projected/03ab1923-fd90-45e6-9513-4ccc9b59667b-kube-api-access-f4bhn\") pod \"03ab1923-fd90-45e6-9513-4ccc9b59667b\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.608656 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-fernet-keys\") pod \"03ab1923-fd90-45e6-9513-4ccc9b59667b\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.608719 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-config-data\") pod \"03ab1923-fd90-45e6-9513-4ccc9b59667b\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.608775 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-scripts\") pod \"03ab1923-fd90-45e6-9513-4ccc9b59667b\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.608873 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-combined-ca-bundle\") pod \"03ab1923-fd90-45e6-9513-4ccc9b59667b\" (UID: \"03ab1923-fd90-45e6-9513-4ccc9b59667b\") " Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.632012 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "03ab1923-fd90-45e6-9513-4ccc9b59667b" (UID: "03ab1923-fd90-45e6-9513-4ccc9b59667b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.632330 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03ab1923-fd90-45e6-9513-4ccc9b59667b-kube-api-access-f4bhn" (OuterVolumeSpecName: "kube-api-access-f4bhn") pod "03ab1923-fd90-45e6-9513-4ccc9b59667b" (UID: "03ab1923-fd90-45e6-9513-4ccc9b59667b"). InnerVolumeSpecName "kube-api-access-f4bhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.636226 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "03ab1923-fd90-45e6-9513-4ccc9b59667b" (UID: "03ab1923-fd90-45e6-9513-4ccc9b59667b"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.647171 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03ab1923-fd90-45e6-9513-4ccc9b59667b" (UID: "03ab1923-fd90-45e6-9513-4ccc9b59667b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.652290 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-scripts" (OuterVolumeSpecName: "scripts") pod "03ab1923-fd90-45e6-9513-4ccc9b59667b" (UID: "03ab1923-fd90-45e6-9513-4ccc9b59667b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.653172 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-config-data" (OuterVolumeSpecName: "config-data") pod "03ab1923-fd90-45e6-9513-4ccc9b59667b" (UID: "03ab1923-fd90-45e6-9513-4ccc9b59667b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.711113 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.711454 4763 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.711469 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4bhn\" (UniqueName: \"kubernetes.io/projected/03ab1923-fd90-45e6-9513-4ccc9b59667b-kube-api-access-f4bhn\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.711484 4763 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.711495 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.711507 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03ab1923-fd90-45e6-9513-4ccc9b59667b-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.755555 4763 generic.go:334] "Generic (PLEG): container finished" podID="7731d4cb-7569-4783-842d-acef9e33cb50" containerID="d5aad9eb2018ea0e78fd14c0a47cabca0b684b24712fb3bcf03ee1edd83fd9d5" exitCode=1 Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.755630 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerDied","Data":"d5aad9eb2018ea0e78fd14c0a47cabca0b684b24712fb3bcf03ee1edd83fd9d5"} Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 
08:30:31.756140 4763 scope.go:117] "RemoveContainer" containerID="d5aad9eb2018ea0e78fd14c0a47cabca0b684b24712fb3bcf03ee1edd83fd9d5" Dec 06 08:30:31 crc kubenswrapper[4763]: E1206 08:30:31.756377 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(7731d4cb-7569-4783-842d-acef9e33cb50)\"" pod="openstack/watcher-decision-engine-0" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.760932 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-54khp" event={"ID":"03ab1923-fd90-45e6-9513-4ccc9b59667b","Type":"ContainerDied","Data":"71f52375f6822ccef041dc2ed2dbf31d74941583604b587fef6a09430a8ca9d8"} Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.760992 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71f52375f6822ccef041dc2ed2dbf31d74941583604b587fef6a09430a8ca9d8" Dec 06 08:30:31 crc kubenswrapper[4763]: I1206 08:30:31.761118 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-54khp" Dec 06 08:30:31 crc kubenswrapper[4763]: E1206 08:30:31.822662 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e2800b21be9bdfdc65ecf4ecdfcf7d80a267e1bfd117fe3ed429ee9e7d382665" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 06 08:30:31 crc kubenswrapper[4763]: E1206 08:30:31.831790 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e2800b21be9bdfdc65ecf4ecdfcf7d80a267e1bfd117fe3ed429ee9e7d382665" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 06 08:30:31 crc kubenswrapper[4763]: E1206 08:30:31.836057 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e2800b21be9bdfdc65ecf4ecdfcf7d80a267e1bfd117fe3ed429ee9e7d382665" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Dec 06 08:30:31 crc kubenswrapper[4763]: E1206 08:30:31.836134 4763 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/watcher-applier-0" podUID="d29048c3-9081-403e-80a2-bd13ee959417" containerName="watcher-applier" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.593657 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5fd795fc6-gh6s9"] Dec 06 08:30:32 crc kubenswrapper[4763]: E1206 08:30:32.598244 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03ab1923-fd90-45e6-9513-4ccc9b59667b" containerName="keystone-bootstrap" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.598547 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="03ab1923-fd90-45e6-9513-4ccc9b59667b" containerName="keystone-bootstrap" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.598735 4763 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="03ab1923-fd90-45e6-9513-4ccc9b59667b" containerName="keystone-bootstrap" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.599394 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.601589 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.601635 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.601947 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gm8vr" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.605614 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.605713 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.616930 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.641638 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5fd795fc6-gh6s9"] Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.733451 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-internal-tls-certs\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.733549 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-fernet-keys\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.733706 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hg89\" (UniqueName: \"kubernetes.io/projected/ba11fdba-f596-4394-af61-47b7923fc2a6-kube-api-access-7hg89\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.733794 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-public-tls-certs\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.733842 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-config-data\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.733970 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-scripts\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.734062 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-combined-ca-bundle\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.734240 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-credential-keys\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.775626 4763 generic.go:334] "Generic (PLEG): container finished" podID="27e8b45c-35a8-4407-849b-774bd681bf75" containerID="d5a713202af766136564c2d9068a0b2056e360a876a235daf463af5a38943249" exitCode=0 Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.775705 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-f8hjt" event={"ID":"27e8b45c-35a8-4407-849b-774bd681bf75","Type":"ContainerDied","Data":"d5a713202af766136564c2d9068a0b2056e360a876a235daf463af5a38943249"} Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.778863 4763 generic.go:334] "Generic (PLEG): container finished" podID="d29048c3-9081-403e-80a2-bd13ee959417" containerID="e2800b21be9bdfdc65ecf4ecdfcf7d80a267e1bfd117fe3ed429ee9e7d382665" exitCode=0 Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.778921 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"d29048c3-9081-403e-80a2-bd13ee959417","Type":"ContainerDied","Data":"e2800b21be9bdfdc65ecf4ecdfcf7d80a267e1bfd117fe3ed429ee9e7d382665"} Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.836730 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-public-tls-certs\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.836796 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-config-data\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.836876 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-scripts\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.836928 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-combined-ca-bundle\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.836963 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-credential-keys\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.837087 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-internal-tls-certs\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.837171 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-fernet-keys\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.837189 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hg89\" (UniqueName: \"kubernetes.io/projected/ba11fdba-f596-4394-af61-47b7923fc2a6-kube-api-access-7hg89\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.844430 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-config-data\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.847397 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-scripts\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.847457 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-credential-keys\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.847480 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-fernet-keys\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.847848 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-public-tls-certs\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " 
pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.847887 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-combined-ca-bundle\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.848317 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba11fdba-f596-4394-af61-47b7923fc2a6-internal-tls-certs\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.857567 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hg89\" (UniqueName: \"kubernetes.io/projected/ba11fdba-f596-4394-af61-47b7923fc2a6-kube-api-access-7hg89\") pod \"keystone-5fd795fc6-gh6s9\" (UID: \"ba11fdba-f596-4394-af61-47b7923fc2a6\") " pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:32 crc kubenswrapper[4763]: I1206 08:30:32.949397 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:33 crc kubenswrapper[4763]: I1206 08:30:33.320101 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:30:33 crc kubenswrapper[4763]: I1206 08:30:33.320369 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="b70edee2-cbdf-4f64-8763-9405b3b6a93c" containerName="watcher-api-log" containerID="cri-o://b89b595dd34950f5244aeeadb7e44c3071b2bb5542e6d509898a7bc2201f3968" gracePeriod=30 Dec 06 08:30:33 crc kubenswrapper[4763]: I1206 08:30:33.320502 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="b70edee2-cbdf-4f64-8763-9405b3b6a93c" containerName="watcher-api" containerID="cri-o://4aee935eca6d9641966a2427189b3dbff9272b4ce34369a88fd2050d5c6b5fbb" gracePeriod=30 Dec 06 08:30:33 crc kubenswrapper[4763]: I1206 08:30:33.795790 4763 generic.go:334] "Generic (PLEG): container finished" podID="b70edee2-cbdf-4f64-8763-9405b3b6a93c" containerID="b89b595dd34950f5244aeeadb7e44c3071b2bb5542e6d509898a7bc2201f3968" exitCode=143 Dec 06 08:30:33 crc kubenswrapper[4763]: I1206 08:30:33.796806 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b70edee2-cbdf-4f64-8763-9405b3b6a93c","Type":"ContainerDied","Data":"b89b595dd34950f5244aeeadb7e44c3071b2bb5542e6d509898a7bc2201f3968"} Dec 06 08:30:33 crc kubenswrapper[4763]: I1206 08:30:33.846460 4763 scope.go:117] "RemoveContainer" containerID="d8aa215219bd15073d066eb00e2c0e2d63e896cd100b0fc4d75fe64ff7a1cdf4" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.171698 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.244479 4763 scope.go:117] "RemoveContainer" containerID="d395750ae10f28014e714905ed31ba307adeacd069c0e51d12ae1353d8d31a2e" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.275929 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-svc\") pod \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.276059 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-sb\") pod \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.276731 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-nb\") pod \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.277118 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-config\") pod \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.277213 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxnxl\" (UniqueName: \"kubernetes.io/projected/ae997020-df09-4b74-8b6a-b0ad1adb24bf-kube-api-access-nxnxl\") pod \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.277339 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-swift-storage-0\") pod \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\" (UID: \"ae997020-df09-4b74-8b6a-b0ad1adb24bf\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.293126 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae997020-df09-4b74-8b6a-b0ad1adb24bf-kube-api-access-nxnxl" (OuterVolumeSpecName: "kube-api-access-nxnxl") pod "ae997020-df09-4b74-8b6a-b0ad1adb24bf" (UID: "ae997020-df09-4b74-8b6a-b0ad1adb24bf"). InnerVolumeSpecName "kube-api-access-nxnxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.379274 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxnxl\" (UniqueName: \"kubernetes.io/projected/ae997020-df09-4b74-8b6a-b0ad1adb24bf-kube-api-access-nxnxl\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.429861 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ae997020-df09-4b74-8b6a-b0ad1adb24bf" (UID: "ae997020-df09-4b74-8b6a-b0ad1adb24bf"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.451786 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ae997020-df09-4b74-8b6a-b0ad1adb24bf" (UID: "ae997020-df09-4b74-8b6a-b0ad1adb24bf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.483589 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.483620 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.527198 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-f8hjt" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.537226 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.540711 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ae997020-df09-4b74-8b6a-b0ad1adb24bf" (UID: "ae997020-df09-4b74-8b6a-b0ad1adb24bf"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.550675 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-config" (OuterVolumeSpecName: "config") pod "ae997020-df09-4b74-8b6a-b0ad1adb24bf" (UID: "ae997020-df09-4b74-8b6a-b0ad1adb24bf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.566692 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ae997020-df09-4b74-8b6a-b0ad1adb24bf" (UID: "ae997020-df09-4b74-8b6a-b0ad1adb24bf"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.584431 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27e8b45c-35a8-4407-849b-774bd681bf75-logs\") pod \"27e8b45c-35a8-4407-849b-774bd681bf75\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.584503 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68hnx\" (UniqueName: \"kubernetes.io/projected/27e8b45c-35a8-4407-849b-774bd681bf75-kube-api-access-68hnx\") pod \"27e8b45c-35a8-4407-849b-774bd681bf75\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.584533 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-combined-ca-bundle\") pod \"27e8b45c-35a8-4407-849b-774bd681bf75\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.584612 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-scripts\") pod \"27e8b45c-35a8-4407-849b-774bd681bf75\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.584849 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27e8b45c-35a8-4407-849b-774bd681bf75-logs" (OuterVolumeSpecName: "logs") pod "27e8b45c-35a8-4407-849b-774bd681bf75" (UID: "27e8b45c-35a8-4407-849b-774bd681bf75"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.584963 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-config-data\") pod \"27e8b45c-35a8-4407-849b-774bd681bf75\" (UID: \"27e8b45c-35a8-4407-849b-774bd681bf75\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.585500 4763 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.585520 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27e8b45c-35a8-4407-849b-774bd681bf75-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.585548 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.585560 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae997020-df09-4b74-8b6a-b0ad1adb24bf-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.590230 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27e8b45c-35a8-4407-849b-774bd681bf75-kube-api-access-68hnx" (OuterVolumeSpecName: "kube-api-access-68hnx") pod "27e8b45c-35a8-4407-849b-774bd681bf75" (UID: "27e8b45c-35a8-4407-849b-774bd681bf75"). InnerVolumeSpecName "kube-api-access-68hnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.591070 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-scripts" (OuterVolumeSpecName: "scripts") pod "27e8b45c-35a8-4407-849b-774bd681bf75" (UID: "27e8b45c-35a8-4407-849b-774bd681bf75"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.623184 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27e8b45c-35a8-4407-849b-774bd681bf75" (UID: "27e8b45c-35a8-4407-849b-774bd681bf75"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.623213 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-config-data" (OuterVolumeSpecName: "config-data") pod "27e8b45c-35a8-4407-849b-774bd681bf75" (UID: "27e8b45c-35a8-4407-849b-774bd681bf75"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.687935 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-config-data\") pod \"d29048c3-9081-403e-80a2-bd13ee959417\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.688370 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjcdp\" (UniqueName: \"kubernetes.io/projected/d29048c3-9081-403e-80a2-bd13ee959417-kube-api-access-vjcdp\") pod \"d29048c3-9081-403e-80a2-bd13ee959417\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.688433 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-combined-ca-bundle\") pod \"d29048c3-9081-403e-80a2-bd13ee959417\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.688474 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d29048c3-9081-403e-80a2-bd13ee959417-logs\") pod \"d29048c3-9081-403e-80a2-bd13ee959417\" (UID: \"d29048c3-9081-403e-80a2-bd13ee959417\") " Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.689023 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.689040 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68hnx\" (UniqueName: \"kubernetes.io/projected/27e8b45c-35a8-4407-849b-774bd681bf75-kube-api-access-68hnx\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.689052 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.689061 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e8b45c-35a8-4407-849b-774bd681bf75-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.689660 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d29048c3-9081-403e-80a2-bd13ee959417-logs" (OuterVolumeSpecName: "logs") pod "d29048c3-9081-403e-80a2-bd13ee959417" (UID: "d29048c3-9081-403e-80a2-bd13ee959417"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.691971 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d29048c3-9081-403e-80a2-bd13ee959417-kube-api-access-vjcdp" (OuterVolumeSpecName: "kube-api-access-vjcdp") pod "d29048c3-9081-403e-80a2-bd13ee959417" (UID: "d29048c3-9081-403e-80a2-bd13ee959417"). InnerVolumeSpecName "kube-api-access-vjcdp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.723004 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d29048c3-9081-403e-80a2-bd13ee959417" (UID: "d29048c3-9081-403e-80a2-bd13ee959417"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.743502 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-config-data" (OuterVolumeSpecName: "config-data") pod "d29048c3-9081-403e-80a2-bd13ee959417" (UID: "d29048c3-9081-403e-80a2-bd13ee959417"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.791221 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjcdp\" (UniqueName: \"kubernetes.io/projected/d29048c3-9081-403e-80a2-bd13ee959417-kube-api-access-vjcdp\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.791255 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.791265 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d29048c3-9081-403e-80a2-bd13ee959417-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.791273 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d29048c3-9081-403e-80a2-bd13ee959417-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.812466 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"d29048c3-9081-403e-80a2-bd13ee959417","Type":"ContainerDied","Data":"9aa3f9622ffa62321e7fb8e4a5ac9127e46b7b9220081f91987bec1dab5739b9"} Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.812810 4763 scope.go:117] "RemoveContainer" containerID="e2800b21be9bdfdc65ecf4ecdfcf7d80a267e1bfd117fe3ed429ee9e7d382665" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.812999 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.964776 4763 generic.go:334] "Generic (PLEG): container finished" podID="b70edee2-cbdf-4f64-8763-9405b3b6a93c" containerID="4aee935eca6d9641966a2427189b3dbff9272b4ce34369a88fd2050d5c6b5fbb" exitCode=0 Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.964890 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b70edee2-cbdf-4f64-8763-9405b3b6a93c","Type":"ContainerDied","Data":"4aee935eca6d9641966a2427189b3dbff9272b4ce34369a88fd2050d5c6b5fbb"} Dec 06 08:30:34 crc kubenswrapper[4763]: I1206 08:30:34.977486 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-applier-0"] Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.032564 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-applier-0"] Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.069316 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.069525 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" event={"ID":"ae997020-df09-4b74-8b6a-b0ad1adb24bf","Type":"ContainerDied","Data":"53927adf808dd4fa831b5e002067b88928153d19ae741ca213708ff329b281d9"} Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.069608 4763 scope.go:117] "RemoveContainer" containerID="808083b0668ec999512b26193dfa4eb1d50771db9b56a1800259b2f8a88701c1" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.086154 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Dec 06 08:30:35 crc kubenswrapper[4763]: E1206 08:30:35.086750 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e8b45c-35a8-4407-849b-774bd681bf75" containerName="placement-db-sync" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.087077 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e8b45c-35a8-4407-849b-774bd681bf75" containerName="placement-db-sync" Dec 06 08:30:35 crc kubenswrapper[4763]: E1206 08:30:35.087180 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae997020-df09-4b74-8b6a-b0ad1adb24bf" containerName="init" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.087234 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae997020-df09-4b74-8b6a-b0ad1adb24bf" containerName="init" Dec 06 08:30:35 crc kubenswrapper[4763]: E1206 08:30:35.087338 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae997020-df09-4b74-8b6a-b0ad1adb24bf" containerName="dnsmasq-dns" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.087589 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae997020-df09-4b74-8b6a-b0ad1adb24bf" containerName="dnsmasq-dns" Dec 06 08:30:35 crc kubenswrapper[4763]: E1206 08:30:35.087680 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d29048c3-9081-403e-80a2-bd13ee959417" containerName="watcher-applier" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.087735 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d29048c3-9081-403e-80a2-bd13ee959417" containerName="watcher-applier" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.088204 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae997020-df09-4b74-8b6a-b0ad1adb24bf" containerName="dnsmasq-dns" Dec 06 08:30:35 crc kubenswrapper[4763]: 
I1206 08:30:35.088416 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="d29048c3-9081-403e-80a2-bd13ee959417" containerName="watcher-applier" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.088538 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="27e8b45c-35a8-4407-849b-774bd681bf75" containerName="placement-db-sync" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.089218 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jffbq" event={"ID":"e5015508-305d-4f07-a137-85149d98f662","Type":"ContainerStarted","Data":"d0221a9f60fe666a65ee57a39ae231be35ff7085b392d1f5d3f775778ad2b463"} Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.089459 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.094369 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-f8hjt" event={"ID":"27e8b45c-35a8-4407-849b-774bd681bf75","Type":"ContainerDied","Data":"acee28c4d3bb112f7626aed3ffc9f824d024aa7e5021f2aeafbfda519a673d00"} Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.094417 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="acee28c4d3bb112f7626aed3ffc9f824d024aa7e5021f2aeafbfda519a673d00" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.094534 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-f8hjt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.095014 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.105391 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-59bf5cd876-p79rt"] Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.108558 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.115479 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"036fdcea-9f9b-44fe-917e-4b8f8903fe48","Type":"ContainerStarted","Data":"d756452bf5874c1028438a8e0c33e197f75838a921c789ce1158af9efcc7c093"} Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.119362 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.119787 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.119953 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.120063 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.120190 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-x7jlx" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.128299 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.130280 4763 scope.go:117] "RemoveContainer" containerID="b5a6aacbab3b83af818388523b6c1b7d518da23663fb1e5a0e4aebeb47d4d53a" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.196495 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-59bf5cd876-p79rt"] Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.207982 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-jffbq" podStartSLOduration=3.34840395 podStartE2EDuration="49.207964508s" podCreationTimestamp="2025-12-06 08:29:46 +0000 UTC" firstStartedPulling="2025-12-06 08:29:48.185875879 +0000 UTC m=+1070.761580917" lastFinishedPulling="2025-12-06 08:30:34.045436437 +0000 UTC m=+1116.621141475" observedRunningTime="2025-12-06 08:30:35.114718756 +0000 UTC m=+1117.690423784" watchObservedRunningTime="2025-12-06 08:30:35.207964508 +0000 UTC m=+1117.783669546" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.256956 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95fb7538-cee1-4ee9-948e-648cf0070047-logs\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.257025 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-config-data\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.257045 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsbtd\" (UniqueName: \"kubernetes.io/projected/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-kube-api-access-lsbtd\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: 
I1206 08:30:35.257073 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95fb7538-cee1-4ee9-948e-648cf0070047-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.257096 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-public-tls-certs\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.257112 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95fb7538-cee1-4ee9-948e-648cf0070047-config-data\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.257136 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff4jh\" (UniqueName: \"kubernetes.io/projected/95fb7538-cee1-4ee9-948e-648cf0070047-kube-api-access-ff4jh\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.257152 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-scripts\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.257227 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-combined-ca-bundle\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.257246 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-internal-tls-certs\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.257283 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-logs\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.264426 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-755bf67bd9-mp98m"] Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.287574 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-755bf67bd9-mp98m"] Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.316521 4763 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/keystone-5fd795fc6-gh6s9"] Dec 06 08:30:35 crc kubenswrapper[4763]: W1206 08:30:35.354575 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba11fdba_f596_4394_af61_47b7923fc2a6.slice/crio-f19598044eb2d0e2deab080c9b037c16b1a8bc7397948c4aa515b6cf9a9d2928 WatchSource:0}: Error finding container f19598044eb2d0e2deab080c9b037c16b1a8bc7397948c4aa515b6cf9a9d2928: Status 404 returned error can't find the container with id f19598044eb2d0e2deab080c9b037c16b1a8bc7397948c4aa515b6cf9a9d2928 Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.366407 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-public-tls-certs\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.366466 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95fb7538-cee1-4ee9-948e-648cf0070047-config-data\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.366520 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff4jh\" (UniqueName: \"kubernetes.io/projected/95fb7538-cee1-4ee9-948e-648cf0070047-kube-api-access-ff4jh\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.366540 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-scripts\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.366728 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-combined-ca-bundle\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.366773 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-internal-tls-certs\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.366826 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-logs\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.366911 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95fb7538-cee1-4ee9-948e-648cf0070047-logs\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " 
pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.366969 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-config-data\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.366989 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsbtd\" (UniqueName: \"kubernetes.io/projected/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-kube-api-access-lsbtd\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.367051 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95fb7538-cee1-4ee9-948e-648cf0070047-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.391724 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-logs\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.392088 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95fb7538-cee1-4ee9-948e-648cf0070047-logs\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.405281 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-config-data\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.405544 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-scripts\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.430467 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-internal-tls-certs\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.431661 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-combined-ca-bundle\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.431804 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/95fb7538-cee1-4ee9-948e-648cf0070047-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.432071 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-public-tls-certs\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.432370 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95fb7538-cee1-4ee9-948e-648cf0070047-config-data\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.477537 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff4jh\" (UniqueName: \"kubernetes.io/projected/95fb7538-cee1-4ee9-948e-648cf0070047-kube-api-access-ff4jh\") pod \"watcher-applier-0\" (UID: \"95fb7538-cee1-4ee9-948e-648cf0070047\") " pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.478814 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsbtd\" (UniqueName: \"kubernetes.io/projected/e3b30be5-5ea4-4c91-a21f-f5d2c48670e7-kube-api-access-lsbtd\") pod \"placement-59bf5cd876-p79rt\" (UID: \"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7\") " pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.503424 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c698b9485-rqx2w"] Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.522724 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6d9d9cc79d-g6nvn" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.158:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.158:8443: connect: connection refused" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.527133 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5b557d69b-qxvcs" podUID="dee918b0-2519-402f-881e-052ffd7df1c0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.159:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.159:8443: connect: connection refused" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.548046 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-applier-0" Dec 06 08:30:35 crc kubenswrapper[4763]: W1206 08:30:35.549140 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6cebba0_2a89_4d1d_b35c_811676cd4459.slice/crio-093512c4a8af67c11d9471c9f668dfcebfafe9e03c39da8897178176897fcc52 WatchSource:0}: Error finding container 093512c4a8af67c11d9471c9f668dfcebfafe9e03c39da8897178176897fcc52: Status 404 returned error can't find the container with id 093512c4a8af67c11d9471c9f668dfcebfafe9e03c39da8897178176897fcc52 Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.553122 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-798696db5c-57lrg"] Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.589443 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.650889 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.653716 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-87d7c84fb-vhh22"] Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.781378 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae997020-df09-4b74-8b6a-b0ad1adb24bf" path="/var/lib/kubelet/pods/ae997020-df09-4b74-8b6a-b0ad1adb24bf/volumes" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.782444 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d29048c3-9081-403e-80a2-bd13ee959417" path="/var/lib/kubelet/pods/d29048c3-9081-403e-80a2-bd13ee959417/volumes" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.792932 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-config-data\") pod \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.793012 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhcnp\" (UniqueName: \"kubernetes.io/projected/b70edee2-cbdf-4f64-8763-9405b3b6a93c-kube-api-access-mhcnp\") pod \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.793073 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-custom-prometheus-ca\") pod \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.793206 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b70edee2-cbdf-4f64-8763-9405b3b6a93c-logs\") pod \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.793243 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-combined-ca-bundle\") pod \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\" (UID: \"b70edee2-cbdf-4f64-8763-9405b3b6a93c\") " Dec 06 
08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.800695 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b70edee2-cbdf-4f64-8763-9405b3b6a93c-logs" (OuterVolumeSpecName: "logs") pod "b70edee2-cbdf-4f64-8763-9405b3b6a93c" (UID: "b70edee2-cbdf-4f64-8763-9405b3b6a93c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.819653 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b70edee2-cbdf-4f64-8763-9405b3b6a93c-kube-api-access-mhcnp" (OuterVolumeSpecName: "kube-api-access-mhcnp") pod "b70edee2-cbdf-4f64-8763-9405b3b6a93c" (UID: "b70edee2-cbdf-4f64-8763-9405b3b6a93c"). InnerVolumeSpecName "kube-api-access-mhcnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.848042 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b70edee2-cbdf-4f64-8763-9405b3b6a93c" (UID: "b70edee2-cbdf-4f64-8763-9405b3b6a93c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.865126 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "b70edee2-cbdf-4f64-8763-9405b3b6a93c" (UID: "b70edee2-cbdf-4f64-8763-9405b3b6a93c"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.901475 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhcnp\" (UniqueName: \"kubernetes.io/projected/b70edee2-cbdf-4f64-8763-9405b3b6a93c-kube-api-access-mhcnp\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.901507 4763 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.901519 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b70edee2-cbdf-4f64-8763-9405b3b6a93c-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.901532 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:35 crc kubenswrapper[4763]: I1206 08:30:35.946400 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-config-data" (OuterVolumeSpecName: "config-data") pod "b70edee2-cbdf-4f64-8763-9405b3b6a93c" (UID: "b70edee2-cbdf-4f64-8763-9405b3b6a93c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.006527 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b70edee2-cbdf-4f64-8763-9405b3b6a93c-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.130830 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-798696db5c-57lrg" event={"ID":"a6cebba0-2a89-4d1d-b35c-811676cd4459","Type":"ContainerStarted","Data":"093512c4a8af67c11d9471c9f668dfcebfafe9e03c39da8897178176897fcc52"} Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.132992 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-87d7c84fb-vhh22" event={"ID":"13ce3a1e-5249-4600-8a07-343ad9b9c4f1","Type":"ContainerStarted","Data":"57afeacb5f6333eec9ac2843dbcab227bc4943b5575951c20face82ad6fd36a3"} Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.133911 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" event={"ID":"f2782736-123f-4570-a129-af3317738af3","Type":"ContainerStarted","Data":"ba5fccb7334f4331737e81eed6f2b59c03c90c08491d00fd39d47839d3ad5459"} Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.139215 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-888r5" event={"ID":"4f62869c-d491-4a12-a88c-1a58ef5b1bea","Type":"ContainerStarted","Data":"543c64ae689480077e756c5921088c9cdf08b4b3459cab34200bc4c7d71087a2"} Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.148930 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5fd795fc6-gh6s9" event={"ID":"ba11fdba-f596-4394-af61-47b7923fc2a6","Type":"ContainerStarted","Data":"1487eebaf142e88607c9080a3cc1d784c321e1b3a7061c786c774464ef6c5e0a"} Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.148971 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5fd795fc6-gh6s9" event={"ID":"ba11fdba-f596-4394-af61-47b7923fc2a6","Type":"ContainerStarted","Data":"f19598044eb2d0e2deab080c9b037c16b1a8bc7397948c4aa515b6cf9a9d2928"} Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.149845 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.155764 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b70edee2-cbdf-4f64-8763-9405b3b6a93c","Type":"ContainerDied","Data":"1ea499137985a49e7bec6f2582f0d3ef8e7c98c5ac9ce64769aa1e2c81807e1b"} Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.155807 4763 scope.go:117] "RemoveContainer" containerID="4aee935eca6d9641966a2427189b3dbff9272b4ce34369a88fd2050d5c6b5fbb" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.155961 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.204066 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.235651 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-888r5" podStartSLOduration=4.634743782 podStartE2EDuration="51.235602515s" podCreationTimestamp="2025-12-06 08:29:45 +0000 UTC" firstStartedPulling="2025-12-06 08:29:47.477958793 +0000 UTC m=+1070.053663831" lastFinishedPulling="2025-12-06 08:30:34.078817526 +0000 UTC m=+1116.654522564" observedRunningTime="2025-12-06 08:30:36.167728936 +0000 UTC m=+1118.743433974" watchObservedRunningTime="2025-12-06 08:30:36.235602515 +0000 UTC m=+1118.811307553" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.291528 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5fd795fc6-gh6s9" podStartSLOduration=4.291505551 podStartE2EDuration="4.291505551s" podCreationTimestamp="2025-12-06 08:30:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:36.198455214 +0000 UTC m=+1118.774160252" watchObservedRunningTime="2025-12-06 08:30:36.291505551 +0000 UTC m=+1118.867210589" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.304188 4763 scope.go:117] "RemoveContainer" containerID="b89b595dd34950f5244aeeadb7e44c3071b2bb5542e6d509898a7bc2201f3968" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.313707 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.413000 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.417272 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-59bf5cd876-p79rt"] Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.431702 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:30:36 crc kubenswrapper[4763]: E1206 08:30:36.432649 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b70edee2-cbdf-4f64-8763-9405b3b6a93c" containerName="watcher-api-log" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.432761 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="b70edee2-cbdf-4f64-8763-9405b3b6a93c" containerName="watcher-api-log" Dec 06 08:30:36 crc kubenswrapper[4763]: E1206 08:30:36.432861 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b70edee2-cbdf-4f64-8763-9405b3b6a93c" containerName="watcher-api" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.432958 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="b70edee2-cbdf-4f64-8763-9405b3b6a93c" containerName="watcher-api" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.433227 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="b70edee2-cbdf-4f64-8763-9405b3b6a93c" containerName="watcher-api-log" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.433298 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="b70edee2-cbdf-4f64-8763-9405b3b6a93c" containerName="watcher-api" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.434630 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.438555 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-internal-svc" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.438677 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-public-svc" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.438564 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.453195 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.527756 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.527833 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.528647 4763 scope.go:117] "RemoveContainer" containerID="d5aad9eb2018ea0e78fd14c0a47cabca0b684b24712fb3bcf03ee1edd83fd9d5" Dec 06 08:30:36 crc kubenswrapper[4763]: E1206 08:30:36.528948 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(7731d4cb-7569-4783-842d-acef9e33cb50)\"" pod="openstack/watcher-decision-engine-0" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.542217 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-public-tls-certs\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.542335 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.542378 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-config-data\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.542406 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8b9x\" (UniqueName: \"kubernetes.io/projected/50364316-df96-4310-8365-1226050a1a58-kube-api-access-g8b9x\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.542480 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: 
\"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.542516 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50364316-df96-4310-8365-1226050a1a58-logs\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.542554 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.568266 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 06 08:30:36 crc kubenswrapper[4763]: W1206 08:30:36.586813 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95fb7538_cee1_4ee9_948e_648cf0070047.slice/crio-2ad60b971718be411fa032d3c527e791ebaf9617fba5795e0e11d4bbc7a98991 WatchSource:0}: Error finding container 2ad60b971718be411fa032d3c527e791ebaf9617fba5795e0e11d4bbc7a98991: Status 404 returned error can't find the container with id 2ad60b971718be411fa032d3c527e791ebaf9617fba5795e0e11d4bbc7a98991 Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.610659 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.645336 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.645420 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50364316-df96-4310-8365-1226050a1a58-logs\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.645500 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.645571 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-public-tls-certs\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.645656 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.645699 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-config-data\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.645727 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8b9x\" (UniqueName: \"kubernetes.io/projected/50364316-df96-4310-8365-1226050a1a58-kube-api-access-g8b9x\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.648265 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50364316-df96-4310-8365-1226050a1a58-logs\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.651063 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.656241 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.656270 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-config-data\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.657053 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.659466 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50364316-df96-4310-8365-1226050a1a58-public-tls-certs\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.663644 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8b9x\" (UniqueName: \"kubernetes.io/projected/50364316-df96-4310-8365-1226050a1a58-kube-api-access-g8b9x\") pod \"watcher-api-0\" (UID: \"50364316-df96-4310-8365-1226050a1a58\") " pod="openstack/watcher-api-0" Dec 06 08:30:36 crc kubenswrapper[4763]: I1206 08:30:36.898799 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.197270 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"95fb7538-cee1-4ee9-948e-648cf0070047","Type":"ContainerStarted","Data":"5488e69d9223ccb9ea0d1b3559b47b6fd003144b9673311d27400b2170666441"} Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.197617 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"95fb7538-cee1-4ee9-948e-648cf0070047","Type":"ContainerStarted","Data":"2ad60b971718be411fa032d3c527e791ebaf9617fba5795e0e11d4bbc7a98991"} Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.224243 4763 generic.go:334] "Generic (PLEG): container finished" podID="f2782736-123f-4570-a129-af3317738af3" containerID="d29460ab58f2cc25254c71904016f300da244a410629d816d1ce6947d62cb829" exitCode=0 Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.224439 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" event={"ID":"f2782736-123f-4570-a129-af3317738af3","Type":"ContainerDied","Data":"d29460ab58f2cc25254c71904016f300da244a410629d816d1ce6947d62cb829"} Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.227039 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=3.227020006 podStartE2EDuration="3.227020006s" podCreationTimestamp="2025-12-06 08:30:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:37.221103336 +0000 UTC m=+1119.796808394" watchObservedRunningTime="2025-12-06 08:30:37.227020006 +0000 UTC m=+1119.802725064" Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.229640 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8b17ba6f-6373-4c93-b07d-73a464deec1a","Type":"ContainerStarted","Data":"f0e3edde6d2aac968901206fffbce40f35180ade9bae5169c43bb2594a99565b"} Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.240593 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c67602c-9d2e-44a3-a187-f51e90798ba3","Type":"ContainerStarted","Data":"eb06050263e65f121a7716bb6410dca9fd5fcd1fe4cf61de28583647e9ca596a"} Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.253145 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-59bf5cd876-p79rt" event={"ID":"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7","Type":"ContainerStarted","Data":"6d60cb91c59f5b50be91bbde0baf83159dffc967ae613061449a17efe40d3e9d"} Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.275827 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-798696db5c-57lrg" event={"ID":"a6cebba0-2a89-4d1d-b35c-811676cd4459","Type":"ContainerStarted","Data":"be8fc5aed93a61069ca78c9a082c862f7eb75fdb7c09fc6a737bd1e88abd127d"} Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.275864 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-798696db5c-57lrg" event={"ID":"a6cebba0-2a89-4d1d-b35c-811676cd4459","Type":"ContainerStarted","Data":"0763396645d8875f8595685b02da104b03b1dd94acedf1ea9bba0ddb2783e1d9"} Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.276248 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:30:37 crc 
kubenswrapper[4763]: I1206 08:30:37.288078 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-87d7c84fb-vhh22" event={"ID":"13ce3a1e-5249-4600-8a07-343ad9b9c4f1","Type":"ContainerStarted","Data":"84a2d8258f27cce4329ff171d4ee53f0481ce71a4b0e51e9d76954f3a9fe17b9"} Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.288115 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-87d7c84fb-vhh22" event={"ID":"13ce3a1e-5249-4600-8a07-343ad9b9c4f1","Type":"ContainerStarted","Data":"ec94c6fed6b7ebb37f96e9a6f6f313ecf88d2d351928a34200a528cdcbd90e04"} Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.288133 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.327693 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-798696db5c-57lrg" podStartSLOduration=8.327675617 podStartE2EDuration="8.327675617s" podCreationTimestamp="2025-12-06 08:30:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:37.322613151 +0000 UTC m=+1119.898318209" watchObservedRunningTime="2025-12-06 08:30:37.327675617 +0000 UTC m=+1119.903380655" Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.353466 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-87d7c84fb-vhh22" podStartSLOduration=10.353447382 podStartE2EDuration="10.353447382s" podCreationTimestamp="2025-12-06 08:30:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:37.348761596 +0000 UTC m=+1119.924466634" watchObservedRunningTime="2025-12-06 08:30:37.353447382 +0000 UTC m=+1119.929152420" Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.633787 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 06 08:30:37 crc kubenswrapper[4763]: W1206 08:30:37.656131 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50364316_df96_4310_8365_1226050a1a58.slice/crio-7dc506d47b60c21d7081deb50b2b7edb3067474d7bd4c2fbf1d4522f30cdf7b7 WatchSource:0}: Error finding container 7dc506d47b60c21d7081deb50b2b7edb3067474d7bd4c2fbf1d4522f30cdf7b7: Status 404 returned error can't find the container with id 7dc506d47b60c21d7081deb50b2b7edb3067474d7bd4c2fbf1d4522f30cdf7b7 Dec 06 08:30:37 crc kubenswrapper[4763]: I1206 08:30:37.757956 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b70edee2-cbdf-4f64-8763-9405b3b6a93c" path="/var/lib/kubelet/pods/b70edee2-cbdf-4f64-8763-9405b3b6a93c/volumes" Dec 06 08:30:38 crc kubenswrapper[4763]: I1206 08:30:38.360129 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-59bf5cd876-p79rt" event={"ID":"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7","Type":"ContainerStarted","Data":"f1f85415e9e240043486347c3b62dbb79129b45fd60772d54759939256cd93ea"} Dec 06 08:30:38 crc kubenswrapper[4763]: I1206 08:30:38.360664 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-59bf5cd876-p79rt" event={"ID":"e3b30be5-5ea4-4c91-a21f-f5d2c48670e7","Type":"ContainerStarted","Data":"710618dda151955adeb867c3218f3ff62e43ccd5dab5a29bbd10cf98fccb0d5e"} Dec 06 08:30:38 crc kubenswrapper[4763]: I1206 08:30:38.361087 4763 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:38 crc kubenswrapper[4763]: I1206 08:30:38.361215 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:30:38 crc kubenswrapper[4763]: I1206 08:30:38.365789 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"50364316-df96-4310-8365-1226050a1a58","Type":"ContainerStarted","Data":"7dc506d47b60c21d7081deb50b2b7edb3067474d7bd4c2fbf1d4522f30cdf7b7"} Dec 06 08:30:38 crc kubenswrapper[4763]: I1206 08:30:38.391415 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-59bf5cd876-p79rt" podStartSLOduration=4.391389976 podStartE2EDuration="4.391389976s" podCreationTimestamp="2025-12-06 08:30:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:38.381426188 +0000 UTC m=+1120.957131226" watchObservedRunningTime="2025-12-06 08:30:38.391389976 +0000 UTC m=+1120.967095014" Dec 06 08:30:39 crc kubenswrapper[4763]: I1206 08:30:39.108008 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-755bf67bd9-mp98m" podUID="ae997020-df09-4b74-8b6a-b0ad1adb24bf" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.163:5353: i/o timeout" Dec 06 08:30:39 crc kubenswrapper[4763]: I1206 08:30:39.390351 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" event={"ID":"f2782736-123f-4570-a129-af3317738af3","Type":"ContainerStarted","Data":"9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5"} Dec 06 08:30:39 crc kubenswrapper[4763]: I1206 08:30:39.390550 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:39 crc kubenswrapper[4763]: I1206 08:30:39.393270 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8b17ba6f-6373-4c93-b07d-73a464deec1a","Type":"ContainerStarted","Data":"23b9588da28ae21a6c7103f6638ff5ab0bc536c0da94d1411d3a43609732d58f"} Dec 06 08:30:39 crc kubenswrapper[4763]: I1206 08:30:39.394857 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c67602c-9d2e-44a3-a187-f51e90798ba3","Type":"ContainerStarted","Data":"8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9"} Dec 06 08:30:39 crc kubenswrapper[4763]: I1206 08:30:39.398603 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"50364316-df96-4310-8365-1226050a1a58","Type":"ContainerStarted","Data":"bafa4892e0824a28f2a9132595eef8ab75973bbcc27e3535ef31df985d53daf5"} Dec 06 08:30:39 crc kubenswrapper[4763]: I1206 08:30:39.423832 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" podStartSLOduration=12.423810442 podStartE2EDuration="12.423810442s" podCreationTimestamp="2025-12-06 08:30:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:39.412735674 +0000 UTC m=+1121.988440712" watchObservedRunningTime="2025-12-06 08:30:39.423810442 +0000 UTC m=+1121.999515480" Dec 06 08:30:40 crc kubenswrapper[4763]: I1206 08:30:40.408444 4763 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c67602c-9d2e-44a3-a187-f51e90798ba3","Type":"ContainerStarted","Data":"2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1"} Dec 06 08:30:40 crc kubenswrapper[4763]: I1206 08:30:40.411814 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"50364316-df96-4310-8365-1226050a1a58","Type":"ContainerStarted","Data":"355c380b137b9babac7d173e34ebe16f89dcab1c77b4836252e78af120434bfd"} Dec 06 08:30:40 crc kubenswrapper[4763]: I1206 08:30:40.412712 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 06 08:30:40 crc kubenswrapper[4763]: I1206 08:30:40.416302 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8b17ba6f-6373-4c93-b07d-73a464deec1a","Type":"ContainerStarted","Data":"2c87cac4c45b34d6ade9c2630d14bf3a69128aee8227bc164c65239c1de87e46"} Dec 06 08:30:40 crc kubenswrapper[4763]: I1206 08:30:40.435667 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=14.435648353 podStartE2EDuration="14.435648353s" podCreationTimestamp="2025-12-06 08:30:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:40.428294445 +0000 UTC m=+1123.003999493" watchObservedRunningTime="2025-12-06 08:30:40.435648353 +0000 UTC m=+1123.011353391" Dec 06 08:30:40 crc kubenswrapper[4763]: I1206 08:30:40.455003 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=4.454981514 podStartE2EDuration="4.454981514s" podCreationTimestamp="2025-12-06 08:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:40.445120478 +0000 UTC m=+1123.020825516" watchObservedRunningTime="2025-12-06 08:30:40.454981514 +0000 UTC m=+1123.030686552" Dec 06 08:30:40 crc kubenswrapper[4763]: I1206 08:30:40.472868 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=14.472853826 podStartE2EDuration="14.472853826s" podCreationTimestamp="2025-12-06 08:30:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:40.464680035 +0000 UTC m=+1123.040385073" watchObservedRunningTime="2025-12-06 08:30:40.472853826 +0000 UTC m=+1123.048558864" Dec 06 08:30:40 crc kubenswrapper[4763]: I1206 08:30:40.549697 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Dec 06 08:30:41 crc kubenswrapper[4763]: I1206 08:30:41.426607 4763 generic.go:334] "Generic (PLEG): container finished" podID="e5015508-305d-4f07-a137-85149d98f662" containerID="d0221a9f60fe666a65ee57a39ae231be35ff7085b392d1f5d3f775778ad2b463" exitCode=0 Dec 06 08:30:41 crc kubenswrapper[4763]: I1206 08:30:41.426722 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jffbq" event={"ID":"e5015508-305d-4f07-a137-85149d98f662","Type":"ContainerDied","Data":"d0221a9f60fe666a65ee57a39ae231be35ff7085b392d1f5d3f775778ad2b463"} Dec 06 08:30:41 crc kubenswrapper[4763]: I1206 08:30:41.899870 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/watcher-api-0" Dec 06 08:30:42 crc kubenswrapper[4763]: I1206 08:30:42.443276 4763 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 06 08:30:42 crc kubenswrapper[4763]: I1206 08:30:42.540249 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:30:42 crc kubenswrapper[4763]: I1206 08:30:42.540313 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:30:42 crc kubenswrapper[4763]: I1206 08:30:42.999661 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 06 08:30:44 crc kubenswrapper[4763]: I1206 08:30:44.464435 4763 generic.go:334] "Generic (PLEG): container finished" podID="4f62869c-d491-4a12-a88c-1a58ef5b1bea" containerID="543c64ae689480077e756c5921088c9cdf08b4b3459cab34200bc4c7d71087a2" exitCode=0 Dec 06 08:30:44 crc kubenswrapper[4763]: I1206 08:30:44.464501 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-888r5" event={"ID":"4f62869c-d491-4a12-a88c-1a58ef5b1bea","Type":"ContainerDied","Data":"543c64ae689480077e756c5921088c9cdf08b4b3459cab34200bc4c7d71087a2"} Dec 06 08:30:45 crc kubenswrapper[4763]: I1206 08:30:45.552149 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Dec 06 08:30:45 crc kubenswrapper[4763]: I1206 08:30:45.595655 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.195813 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jffbq" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.277193 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-combined-ca-bundle\") pod \"e5015508-305d-4f07-a137-85149d98f662\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.277718 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-db-sync-config-data\") pod \"e5015508-305d-4f07-a137-85149d98f662\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.277918 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz4jc\" (UniqueName: \"kubernetes.io/projected/e5015508-305d-4f07-a137-85149d98f662-kube-api-access-qz4jc\") pod \"e5015508-305d-4f07-a137-85149d98f662\" (UID: \"e5015508-305d-4f07-a137-85149d98f662\") " Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.286320 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5015508-305d-4f07-a137-85149d98f662-kube-api-access-qz4jc" (OuterVolumeSpecName: "kube-api-access-qz4jc") pod "e5015508-305d-4f07-a137-85149d98f662" (UID: "e5015508-305d-4f07-a137-85149d98f662"). InnerVolumeSpecName "kube-api-access-qz4jc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.288170 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e5015508-305d-4f07-a137-85149d98f662" (UID: "e5015508-305d-4f07-a137-85149d98f662"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.321342 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e5015508-305d-4f07-a137-85149d98f662" (UID: "e5015508-305d-4f07-a137-85149d98f662"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.379922 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.379957 4763 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5015508-305d-4f07-a137-85149d98f662-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.379972 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz4jc\" (UniqueName: \"kubernetes.io/projected/e5015508-305d-4f07-a137-85149d98f662-kube-api-access-qz4jc\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.490459 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jffbq" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.491403 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jffbq" event={"ID":"e5015508-305d-4f07-a137-85149d98f662","Type":"ContainerDied","Data":"3cf16c4d3a7d668eb51730ee3fd1a3e97a125ab05a811b5b65e06d0c88acb2ce"} Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.491441 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3cf16c4d3a7d668eb51730ee3fd1a3e97a125ab05a811b5b65e06d0c88acb2ce" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.524021 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.527600 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.527641 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.534793 4763 scope.go:117] "RemoveContainer" containerID="d5aad9eb2018ea0e78fd14c0a47cabca0b684b24712fb3bcf03ee1edd83fd9d5" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.779299 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-888r5" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.887434 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-config-data\") pod \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.887888 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-combined-ca-bundle\") pod \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.888049 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-db-sync-config-data\") pod \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.888172 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-scripts\") pod \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.888221 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4f62869c-d491-4a12-a88c-1a58ef5b1bea-etc-machine-id\") pod \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\" (UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.888272 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t54lp\" (UniqueName: \"kubernetes.io/projected/4f62869c-d491-4a12-a88c-1a58ef5b1bea-kube-api-access-t54lp\") pod \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\" 
(UID: \"4f62869c-d491-4a12-a88c-1a58ef5b1bea\") " Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.889196 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f62869c-d491-4a12-a88c-1a58ef5b1bea-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4f62869c-d491-4a12-a88c-1a58ef5b1bea" (UID: "4f62869c-d491-4a12-a88c-1a58ef5b1bea"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.893768 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4f62869c-d491-4a12-a88c-1a58ef5b1bea" (UID: "4f62869c-d491-4a12-a88c-1a58ef5b1bea"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.894085 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f62869c-d491-4a12-a88c-1a58ef5b1bea-kube-api-access-t54lp" (OuterVolumeSpecName: "kube-api-access-t54lp") pod "4f62869c-d491-4a12-a88c-1a58ef5b1bea" (UID: "4f62869c-d491-4a12-a88c-1a58ef5b1bea"). InnerVolumeSpecName "kube-api-access-t54lp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.894491 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-scripts" (OuterVolumeSpecName: "scripts") pod "4f62869c-d491-4a12-a88c-1a58ef5b1bea" (UID: "4f62869c-d491-4a12-a88c-1a58ef5b1bea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.900978 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.913907 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.933991 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f62869c-d491-4a12-a88c-1a58ef5b1bea" (UID: "4f62869c-d491-4a12-a88c-1a58ef5b1bea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.967493 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-config-data" (OuterVolumeSpecName: "config-data") pod "4f62869c-d491-4a12-a88c-1a58ef5b1bea" (UID: "4f62869c-d491-4a12-a88c-1a58ef5b1bea"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.990737 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.990786 4763 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4f62869c-d491-4a12-a88c-1a58ef5b1bea-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.990797 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t54lp\" (UniqueName: \"kubernetes.io/projected/4f62869c-d491-4a12-a88c-1a58ef5b1bea-kube-api-access-t54lp\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.990806 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.990815 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:46 crc kubenswrapper[4763]: I1206 08:30:46.990823 4763 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f62869c-d491-4a12-a88c-1a58ef5b1bea-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.389112 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.408370 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.431386 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-c9b5f6d4f-l8lbt"] Dec 06 08:30:47 crc kubenswrapper[4763]: E1206 08:30:47.431858 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5015508-305d-4f07-a137-85149d98f662" containerName="barbican-db-sync" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.431878 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5015508-305d-4f07-a137-85149d98f662" containerName="barbican-db-sync" Dec 06 08:30:47 crc kubenswrapper[4763]: E1206 08:30:47.431916 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f62869c-d491-4a12-a88c-1a58ef5b1bea" containerName="cinder-db-sync" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.431927 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f62869c-d491-4a12-a88c-1a58ef5b1bea" containerName="cinder-db-sync" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.432159 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f62869c-d491-4a12-a88c-1a58ef5b1bea" containerName="cinder-db-sync" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.432185 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5015508-305d-4f07-a137-85149d98f662" containerName="barbican-db-sync" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.433386 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.438107 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-hrt54" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.438186 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.438336 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.449435 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-c9b5f6d4f-l8lbt"] Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.480073 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.506192 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-888r5" event={"ID":"4f62869c-d491-4a12-a88c-1a58ef5b1bea","Type":"ContainerDied","Data":"f5080d9cd503bbe606ccecb73fa65ab599c2acaec0cc787aeded55772a090776"} Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.506239 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5080d9cd503bbe606ccecb73fa65ab599c2acaec0cc787aeded55772a090776" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.506344 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-888r5" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.512085 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-config-data\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.512933 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"036fdcea-9f9b-44fe-917e-4b8f8903fe48","Type":"ContainerStarted","Data":"a633c77ac961ec6e2ebffa3f109c537f5abef5ebfa1d2693ecd21e5983b9375a"} Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.513194 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="ceilometer-central-agent" containerID="cri-o://0020985e174ca9d052a7b02fa6c2b67bd9b122c6106e8acf26a9cb35d7098a08" gracePeriod=30 Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.513334 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.513396 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="sg-core" containerID="cri-o://d756452bf5874c1028438a8e0c33e197f75838a921c789ce1158af9efcc7c093" gracePeriod=30 Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.513488 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="proxy-httpd" containerID="cri-o://a633c77ac961ec6e2ebffa3f109c537f5abef5ebfa1d2693ecd21e5983b9375a" gracePeriod=30 Dec 06 
08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.514061 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="ceilometer-notification-agent" containerID="cri-o://c98b0237102d24fdeb6b9c08bbe03732b31010422b5cda02ae05f2c961e9de50" gracePeriod=30 Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.536734 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-combined-ca-bundle\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.537021 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-logs\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.537107 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6ltd\" (UniqueName: \"kubernetes.io/projected/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-kube-api-access-x6ltd\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.537365 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-config-data-custom\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.577532 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerStarted","Data":"e45544d0106c1c0fa6adcc91cf5d8dad3edef8158b0994d69d0be1849fa3d2a0"} Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.581001 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.581511 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.591364 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.591406 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.620374 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.641750 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-combined-ca-bundle\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: 
\"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.641856 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-logs\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.641914 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6ltd\" (UniqueName: \"kubernetes.io/projected/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-kube-api-access-x6ltd\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.647089 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-config-data-custom\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.679621 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-logs\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.685849 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-config-data\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.703353 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-8549689586-h89zw"] Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.704976 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.704967 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-config-data-custom\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.728767 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-config-data\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.729730 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.747828 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6ltd\" (UniqueName: \"kubernetes.io/projected/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-kube-api-access-x6ltd\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.757157 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6-combined-ca-bundle\") pod \"barbican-worker-c9b5f6d4f-l8lbt\" (UID: \"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6\") " pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.802729 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-8549689586-h89zw"] Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.802919 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.802990 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.803036 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.803068 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.859982 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-656f7475df-b6hdj"] Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.869018 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" podUID="9c55cdaa-1dd9-4c6e-937e-da63410a649d" containerName="dnsmasq-dns" containerID="cri-o://0b5fca88a783e5405c5fafef74d7d49576e5a06c19a0fad7fa4e76b59b8324ab" gracePeriod=10 Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.889795 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73a66c39-800c-426e-a24b-a95a37280ebd-config-data-custom\") pod 
\"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.889858 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73a66c39-800c-426e-a24b-a95a37280ebd-config-data\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.890014 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73a66c39-800c-426e-a24b-a95a37280ebd-logs\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.890072 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn4z5\" (UniqueName: \"kubernetes.io/projected/73a66c39-800c-426e-a24b-a95a37280ebd-kube-api-access-xn4z5\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.890135 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73a66c39-800c-426e-a24b-a95a37280ebd-combined-ca-bundle\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.894470 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.796745811 podStartE2EDuration="1m1.894443339s" podCreationTimestamp="2025-12-06 08:29:46 +0000 UTC" firstStartedPulling="2025-12-06 08:29:47.75632357 +0000 UTC m=+1070.332028608" lastFinishedPulling="2025-12-06 08:30:46.854021098 +0000 UTC m=+1129.429726136" observedRunningTime="2025-12-06 08:30:47.729347311 +0000 UTC m=+1130.305052349" watchObservedRunningTime="2025-12-06 08:30:47.894443339 +0000 UTC m=+1130.470148377" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.935407 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c6986c5b5-zdhnl"] Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.937493 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.939871 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c6986c5b5-zdhnl"] Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.943055 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.995843 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn4z5\" (UniqueName: \"kubernetes.io/projected/73a66c39-800c-426e-a24b-a95a37280ebd-kube-api-access-xn4z5\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.996239 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-svc\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:47 crc kubenswrapper[4763]: I1206 08:30:47.998803 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-nb\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:47.999981 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73a66c39-800c-426e-a24b-a95a37280ebd-combined-ca-bundle\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.000191 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mh66l\" (UniqueName: \"kubernetes.io/projected/3af3ecac-eec4-49da-9fd7-14f74af80acf-kube-api-access-mh66l\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.000390 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-swift-storage-0\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.008917 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73a66c39-800c-426e-a24b-a95a37280ebd-config-data-custom\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.010305 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-sb\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.014179 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/73a66c39-800c-426e-a24b-a95a37280ebd-config-data\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.014327 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-config\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.014429 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73a66c39-800c-426e-a24b-a95a37280ebd-logs\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.015427 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73a66c39-800c-426e-a24b-a95a37280ebd-logs\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.010644 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73a66c39-800c-426e-a24b-a95a37280ebd-combined-ca-bundle\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.019449 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73a66c39-800c-426e-a24b-a95a37280ebd-config-data\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.032999 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73a66c39-800c-426e-a24b-a95a37280ebd-config-data-custom\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.047981 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn4z5\" (UniqueName: \"kubernetes.io/projected/73a66c39-800c-426e-a24b-a95a37280ebd-kube-api-access-xn4z5\") pod \"barbican-keystone-listener-8549689586-h89zw\" (UID: \"73a66c39-800c-426e-a24b-a95a37280ebd\") " pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.062971 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6dfb477846-mf82l"] Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.064855 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.078306 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.116341 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data-custom\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.116388 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-combined-ca-bundle\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.116442 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-sb\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.116501 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-config\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.116532 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnbfx\" (UniqueName: \"kubernetes.io/projected/f0807adf-d05a-4d39-a18f-758c54015885-kube-api-access-jnbfx\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.116575 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0807adf-d05a-4d39-a18f-758c54015885-logs\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.116601 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-svc\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.116620 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-nb\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.116657 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.116675 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mh66l\" (UniqueName: \"kubernetes.io/projected/3af3ecac-eec4-49da-9fd7-14f74af80acf-kube-api-access-mh66l\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.117183 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-swift-storage-0\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.118650 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-swift-storage-0\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.120087 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-sb\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.120150 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6dfb477846-mf82l"] Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.120883 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-config\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.123028 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-svc\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.130489 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-nb\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.193161 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-8549689586-h89zw" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.199669 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mh66l\" (UniqueName: \"kubernetes.io/projected/3af3ecac-eec4-49da-9fd7-14f74af80acf-kube-api-access-mh66l\") pod \"dnsmasq-dns-7c6986c5b5-zdhnl\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.223225 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnbfx\" (UniqueName: \"kubernetes.io/projected/f0807adf-d05a-4d39-a18f-758c54015885-kube-api-access-jnbfx\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.223296 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0807adf-d05a-4d39-a18f-758c54015885-logs\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.223349 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.223388 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data-custom\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.223597 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-combined-ca-bundle\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.235928 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0807adf-d05a-4d39-a18f-758c54015885-logs\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.256056 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-combined-ca-bundle\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.257591 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" 
Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.262340 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data-custom\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.299529 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.308993 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnbfx\" (UniqueName: \"kubernetes.io/projected/f0807adf-d05a-4d39-a18f-758c54015885-kube-api-access-jnbfx\") pod \"barbican-api-6dfb477846-mf82l\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.343768 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.346693 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.356403 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.361438 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.361546 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-6r692" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.361658 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.382094 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.419287 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c6986c5b5-zdhnl"] Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.429310 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.429359 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-scripts\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.429395 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.429634 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-942vb\" (UniqueName: \"kubernetes.io/projected/7368d303-c809-421b-a5a8-32d16c00a1f7-kube-api-access-942vb\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.430063 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.430218 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7368d303-c809-421b-a5a8-32d16c00a1f7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.453646 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84c44f58df-s6697"] Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.456361 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.491440 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84c44f58df-s6697"] Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.534012 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-svc\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.534590 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.534649 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7368d303-c809-421b-a5a8-32d16c00a1f7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.534681 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-sb\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.534703 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llw72\" (UniqueName: \"kubernetes.io/projected/4c5b406a-e0c3-4856-a55a-58d3403994cc-kube-api-access-llw72\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 
crc kubenswrapper[4763]: I1206 08:30:48.534735 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-swift-storage-0\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.534765 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-config\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.534826 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-nb\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.534856 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.534878 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-scripts\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.534937 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.534991 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-942vb\" (UniqueName: \"kubernetes.io/projected/7368d303-c809-421b-a5a8-32d16c00a1f7-kube-api-access-942vb\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.536124 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7368d303-c809-421b-a5a8-32d16c00a1f7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.560320 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.562942 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.568923 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.588610 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.600988 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.603046 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.609447 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-scripts\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.610091 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.610704 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-942vb\" (UniqueName: \"kubernetes.io/projected/7368d303-c809-421b-a5a8-32d16c00a1f7-kube-api-access-942vb\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.647808 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " pod="openstack/cinder-scheduler-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.662382 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-svc\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.662623 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-scripts\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.662767 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.662984 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data-custom\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.663089 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-sb\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.663185 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llw72\" (UniqueName: \"kubernetes.io/projected/4c5b406a-e0c3-4856-a55a-58d3403994cc-kube-api-access-llw72\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.663282 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d7564a7-563b-416e-b223-fe69473a041d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.663427 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-swift-storage-0\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.663540 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-config\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.663732 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-nb\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.663824 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d7564a7-563b-416e-b223-fe69473a041d-logs\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.663971 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg7gw\" (UniqueName: \"kubernetes.io/projected/0d7564a7-563b-416e-b223-fe69473a041d-kube-api-access-tg7gw\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.664103 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-combined-ca-bundle\") pod 
\"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.667001 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-svc\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.701228 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-sb\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.702559 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-swift-storage-0\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.703017 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-config\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.703098 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-nb\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.722740 4763 generic.go:334] "Generic (PLEG): container finished" podID="9c55cdaa-1dd9-4c6e-937e-da63410a649d" containerID="0b5fca88a783e5405c5fafef74d7d49576e5a06c19a0fad7fa4e76b59b8324ab" exitCode=0 Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.722867 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" event={"ID":"9c55cdaa-1dd9-4c6e-937e-da63410a649d","Type":"ContainerDied","Data":"0b5fca88a783e5405c5fafef74d7d49576e5a06c19a0fad7fa4e76b59b8324ab"} Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.755307 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llw72\" (UniqueName: \"kubernetes.io/projected/4c5b406a-e0c3-4856-a55a-58d3403994cc-kube-api-access-llw72\") pod \"dnsmasq-dns-84c44f58df-s6697\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.776288 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.776436 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-scripts\") pod 
\"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.776468 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.776543 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data-custom\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.776582 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d7564a7-563b-416e-b223-fe69473a041d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.776658 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d7564a7-563b-416e-b223-fe69473a041d-logs\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.776727 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg7gw\" (UniqueName: \"kubernetes.io/projected/0d7564a7-563b-416e-b223-fe69473a041d-kube-api-access-tg7gw\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.792545 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d7564a7-563b-416e-b223-fe69473a041d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.808658 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data-custom\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.808758 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-scripts\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.808870 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d7564a7-563b-416e-b223-fe69473a041d-logs\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.809061 4763 generic.go:334] "Generic (PLEG): container finished" podID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerID="a633c77ac961ec6e2ebffa3f109c537f5abef5ebfa1d2693ecd21e5983b9375a" exitCode=0 Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.809087 
4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"036fdcea-9f9b-44fe-917e-4b8f8903fe48","Type":"ContainerDied","Data":"a633c77ac961ec6e2ebffa3f109c537f5abef5ebfa1d2693ecd21e5983b9375a"} Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.820526 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"036fdcea-9f9b-44fe-917e-4b8f8903fe48","Type":"ContainerDied","Data":"d756452bf5874c1028438a8e0c33e197f75838a921c789ce1158af9efcc7c093"} Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.816858 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.813431 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.820429 4763 generic.go:334] "Generic (PLEG): container finished" podID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerID="d756452bf5874c1028438a8e0c33e197f75838a921c789ce1158af9efcc7c093" exitCode=2 Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.827603 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg7gw\" (UniqueName: \"kubernetes.io/projected/0d7564a7-563b-416e-b223-fe69473a041d-kube-api-access-tg7gw\") pod \"cinder-api-0\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " pod="openstack/cinder-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.836029 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.836076 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.836086 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.836097 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:48 crc kubenswrapper[4763]: I1206 08:30:48.914350 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.041516 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.052754 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-c9b5f6d4f-l8lbt"] Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.057633 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.335773 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-8549689586-h89zw"] Dec 06 08:30:49 crc kubenswrapper[4763]: W1206 08:30:49.375865 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73a66c39_800c_426e_a24b_a95a37280ebd.slice/crio-aefffc4617c45c5e371ff7d43538c64b2e9a126fd1805fe8bcfda2e9a8f361f6 WatchSource:0}: Error finding container aefffc4617c45c5e371ff7d43538c64b2e9a126fd1805fe8bcfda2e9a8f361f6: Status 404 returned error can't find the container with id aefffc4617c45c5e371ff7d43538c64b2e9a126fd1805fe8bcfda2e9a8f361f6 Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.466373 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.474218 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c6986c5b5-zdhnl"] Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.629278 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-swift-storage-0\") pod \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.629636 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-sb\") pod \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.629704 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-nb\") pod \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.629722 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-config\") pod \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.629791 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-svc\") pod \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.629847 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb6bs\" (UniqueName: \"kubernetes.io/projected/9c55cdaa-1dd9-4c6e-937e-da63410a649d-kube-api-access-hb6bs\") pod \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\" (UID: \"9c55cdaa-1dd9-4c6e-937e-da63410a649d\") " Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.648087 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c55cdaa-1dd9-4c6e-937e-da63410a649d-kube-api-access-hb6bs" (OuterVolumeSpecName: "kube-api-access-hb6bs") pod 
"9c55cdaa-1dd9-4c6e-937e-da63410a649d" (UID: "9c55cdaa-1dd9-4c6e-937e-da63410a649d"). InnerVolumeSpecName "kube-api-access-hb6bs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.709853 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9c55cdaa-1dd9-4c6e-937e-da63410a649d" (UID: "9c55cdaa-1dd9-4c6e-937e-da63410a649d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.751989 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.752033 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb6bs\" (UniqueName: \"kubernetes.io/projected/9c55cdaa-1dd9-4c6e-937e-da63410a649d-kube-api-access-hb6bs\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.759828 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9c55cdaa-1dd9-4c6e-937e-da63410a649d" (UID: "9c55cdaa-1dd9-4c6e-937e-da63410a649d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.760221 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-config" (OuterVolumeSpecName: "config") pod "9c55cdaa-1dd9-4c6e-937e-da63410a649d" (UID: "9c55cdaa-1dd9-4c6e-937e-da63410a649d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.780401 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6dfb477846-mf82l"] Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.806579 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9c55cdaa-1dd9-4c6e-937e-da63410a649d" (UID: "9c55cdaa-1dd9-4c6e-937e-da63410a649d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.827444 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9c55cdaa-1dd9-4c6e-937e-da63410a649d" (UID: "9c55cdaa-1dd9-4c6e-937e-da63410a649d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.854829 4763 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.854867 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.854883 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.854911 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c55cdaa-1dd9-4c6e-937e-da63410a649d-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.873806 4763 generic.go:334] "Generic (PLEG): container finished" podID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerID="0020985e174ca9d052a7b02fa6c2b67bd9b122c6106e8acf26a9cb35d7098a08" exitCode=0 Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.873846 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"036fdcea-9f9b-44fe-917e-4b8f8903fe48","Type":"ContainerDied","Data":"0020985e174ca9d052a7b02fa6c2b67bd9b122c6106e8acf26a9cb35d7098a08"} Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.880276 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" event={"ID":"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6","Type":"ContainerStarted","Data":"ecf3a8780df8924a7f39dea1a595a66b77298568b7e60a71f32d57805fd8eea3"} Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.926673 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" event={"ID":"3af3ecac-eec4-49da-9fd7-14f74af80acf","Type":"ContainerStarted","Data":"03457159cd72c09e00d251320119333757a7a75937495d55a61c74e598c3b7d2"} Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.929758 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8549689586-h89zw" event={"ID":"73a66c39-800c-426e-a24b-a95a37280ebd","Type":"ContainerStarted","Data":"aefffc4617c45c5e371ff7d43538c64b2e9a126fd1805fe8bcfda2e9a8f361f6"} Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.934876 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6dfb477846-mf82l" event={"ID":"f0807adf-d05a-4d39-a18f-758c54015885","Type":"ContainerStarted","Data":"83dbb3504c7f7be62dd21708c1f5d4cd42cb4510bb0518c67d17c295f9eaffce"} Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.948525 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" event={"ID":"9c55cdaa-1dd9-4c6e-937e-da63410a649d","Type":"ContainerDied","Data":"2b408a8ddfd3ba6a8ef89d97985151dbe7c9f24a660f297029a3abfa27bc243c"} Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.948582 4763 scope.go:117] "RemoveContainer" containerID="0b5fca88a783e5405c5fafef74d7d49576e5a06c19a0fad7fa4e76b59b8324ab" Dec 06 08:30:49 crc kubenswrapper[4763]: I1206 08:30:49.948745 4763 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/dnsmasq-dns-656f7475df-b6hdj" Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.041267 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84c44f58df-s6697"] Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.061869 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.062067 4763 scope.go:117] "RemoveContainer" containerID="73e5671153dd70acb463cbee67f5a4324905a3a1eaf9737c9478a39acf102aaa" Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.088688 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.112931 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-656f7475df-b6hdj"] Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.134242 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-656f7475df-b6hdj"] Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.913954 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.961164 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6dfb477846-mf82l" event={"ID":"f0807adf-d05a-4d39-a18f-758c54015885","Type":"ContainerStarted","Data":"58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174"} Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.961205 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6dfb477846-mf82l" event={"ID":"f0807adf-d05a-4d39-a18f-758c54015885","Type":"ContainerStarted","Data":"d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6"} Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.962028 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.962073 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.965621 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d7564a7-563b-416e-b223-fe69473a041d","Type":"ContainerStarted","Data":"8f04c8437d193d6291b14550f3aa2c470d260c2ce6bb4858d10a0d0eb19fbdf3"} Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.975129 4763 generic.go:334] "Generic (PLEG): container finished" podID="4c5b406a-e0c3-4856-a55a-58d3403994cc" containerID="f0be8ae559e38706bd319899ca235d4a3618e25ea8505ebbce3d31fdcc0c3537" exitCode=0 Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.975211 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c44f58df-s6697" event={"ID":"4c5b406a-e0c3-4856-a55a-58d3403994cc","Type":"ContainerDied","Data":"f0be8ae559e38706bd319899ca235d4a3618e25ea8505ebbce3d31fdcc0c3537"} Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.975235 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c44f58df-s6697" event={"ID":"4c5b406a-e0c3-4856-a55a-58d3403994cc","Type":"ContainerStarted","Data":"d9e65f223d375a09ad3bb7664d9b685d1c5c53dc1dd55ab3c70793692cc1496f"} Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.991769 4763 generic.go:334] "Generic (PLEG): container finished" 
podID="3af3ecac-eec4-49da-9fd7-14f74af80acf" containerID="32c98b55c1b90e666d78cb2315e11a2b37799d59d48548b26b1801a52e6f54f3" exitCode=0 Dec 06 08:30:50 crc kubenswrapper[4763]: I1206 08:30:50.991865 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" event={"ID":"3af3ecac-eec4-49da-9fd7-14f74af80acf","Type":"ContainerDied","Data":"32c98b55c1b90e666d78cb2315e11a2b37799d59d48548b26b1801a52e6f54f3"} Dec 06 08:30:51 crc kubenswrapper[4763]: I1206 08:30:51.006969 4763 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 06 08:30:51 crc kubenswrapper[4763]: I1206 08:30:51.007018 4763 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 06 08:30:51 crc kubenswrapper[4763]: I1206 08:30:51.014958 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6dfb477846-mf82l" podStartSLOduration=4.014927872 podStartE2EDuration="4.014927872s" podCreationTimestamp="2025-12-06 08:30:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:51.001572993 +0000 UTC m=+1133.577278041" watchObservedRunningTime="2025-12-06 08:30:51.014927872 +0000 UTC m=+1133.590632920" Dec 06 08:30:51 crc kubenswrapper[4763]: I1206 08:30:51.015617 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7368d303-c809-421b-a5a8-32d16c00a1f7","Type":"ContainerStarted","Data":"85d6612067e7783ed59903b14324ba6857828fa2a145869a7a4ac9a8d90107f1"} Dec 06 08:30:51 crc kubenswrapper[4763]: I1206 08:30:51.555554 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5b557d69b-qxvcs" Dec 06 08:30:51 crc kubenswrapper[4763]: I1206 08:30:51.621296 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d9d9cc79d-g6nvn"] Dec 06 08:30:51 crc kubenswrapper[4763]: I1206 08:30:51.621537 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6d9d9cc79d-g6nvn" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon-log" containerID="cri-o://faa41bc1a2797743bc528d0d97be37323bf39516cf02c75e83c9caa570dfa7a9" gracePeriod=30 Dec 06 08:30:51 crc kubenswrapper[4763]: I1206 08:30:51.621666 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6d9d9cc79d-g6nvn" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon" containerID="cri-o://172ad6cb4a74ed844abba93af0f99c6c1d603ab40f0ad451c1ac05210cb0c2b8" gracePeriod=30 Dec 06 08:30:51 crc kubenswrapper[4763]: I1206 08:30:51.731971 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c55cdaa-1dd9-4c6e-937e-da63410a649d" path="/var/lib/kubelet/pods/9c55cdaa-1dd9-4c6e-937e-da63410a649d/volumes" Dec 06 08:30:51 crc kubenswrapper[4763]: I1206 08:30:51.879092 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.025821 4763 generic.go:334] "Generic (PLEG): container finished" podID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerID="c98b0237102d24fdeb6b9c08bbe03732b31010422b5cda02ae05f2c961e9de50" exitCode=0 Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.025882 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"036fdcea-9f9b-44fe-917e-4b8f8903fe48","Type":"ContainerDied","Data":"c98b0237102d24fdeb6b9c08bbe03732b31010422b5cda02ae05f2c961e9de50"} Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.030533 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d7564a7-563b-416e-b223-fe69473a041d","Type":"ContainerStarted","Data":"5fecf6466e0030860e5da8a01d400422b61c12f85723aef7d1d2bdf27836a5bd"} Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.630165 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.681966 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.682006 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.701320 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.704330 4763 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.709364 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.770149 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.790702 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-swift-storage-0\") pod \"3af3ecac-eec4-49da-9fd7-14f74af80acf\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.790785 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mh66l\" (UniqueName: \"kubernetes.io/projected/3af3ecac-eec4-49da-9fd7-14f74af80acf-kube-api-access-mh66l\") pod \"3af3ecac-eec4-49da-9fd7-14f74af80acf\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.794623 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-config\") pod \"3af3ecac-eec4-49da-9fd7-14f74af80acf\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.795288 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-nb\") pod \"3af3ecac-eec4-49da-9fd7-14f74af80acf\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.795345 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-svc\") pod \"3af3ecac-eec4-49da-9fd7-14f74af80acf\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.795599 4763 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-sb\") pod \"3af3ecac-eec4-49da-9fd7-14f74af80acf\" (UID: \"3af3ecac-eec4-49da-9fd7-14f74af80acf\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.852274 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3af3ecac-eec4-49da-9fd7-14f74af80acf-kube-api-access-mh66l" (OuterVolumeSpecName: "kube-api-access-mh66l") pod "3af3ecac-eec4-49da-9fd7-14f74af80acf" (UID: "3af3ecac-eec4-49da-9fd7-14f74af80acf"). InnerVolumeSpecName "kube-api-access-mh66l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.909221 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-run-httpd\") pod \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.909268 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-config-data\") pod \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.909400 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-sg-core-conf-yaml\") pod \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.909485 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-log-httpd\") pod \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.909544 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-scripts\") pod \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.909580 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnpcv\" (UniqueName: \"kubernetes.io/projected/036fdcea-9f9b-44fe-917e-4b8f8903fe48-kube-api-access-mnpcv\") pod \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.909664 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-combined-ca-bundle\") pod \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\" (UID: \"036fdcea-9f9b-44fe-917e-4b8f8903fe48\") " Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.909938 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "036fdcea-9f9b-44fe-917e-4b8f8903fe48" (UID: "036fdcea-9f9b-44fe-917e-4b8f8903fe48"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.910340 4763 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.910367 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mh66l\" (UniqueName: \"kubernetes.io/projected/3af3ecac-eec4-49da-9fd7-14f74af80acf-kube-api-access-mh66l\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.910485 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "036fdcea-9f9b-44fe-917e-4b8f8903fe48" (UID: "036fdcea-9f9b-44fe-917e-4b8f8903fe48"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.930030 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-scripts" (OuterVolumeSpecName: "scripts") pod "036fdcea-9f9b-44fe-917e-4b8f8903fe48" (UID: "036fdcea-9f9b-44fe-917e-4b8f8903fe48"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.932888 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/036fdcea-9f9b-44fe-917e-4b8f8903fe48-kube-api-access-mnpcv" (OuterVolumeSpecName: "kube-api-access-mnpcv") pod "036fdcea-9f9b-44fe-917e-4b8f8903fe48" (UID: "036fdcea-9f9b-44fe-917e-4b8f8903fe48"). InnerVolumeSpecName "kube-api-access-mnpcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:52 crc kubenswrapper[4763]: I1206 08:30:52.991880 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-config" (OuterVolumeSpecName: "config") pod "3af3ecac-eec4-49da-9fd7-14f74af80acf" (UID: "3af3ecac-eec4-49da-9fd7-14f74af80acf"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.012766 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.012820 4763 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/036fdcea-9f9b-44fe-917e-4b8f8903fe48-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.012830 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.012840 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnpcv\" (UniqueName: \"kubernetes.io/projected/036fdcea-9f9b-44fe-917e-4b8f8903fe48-kube-api-access-mnpcv\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.054248 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"036fdcea-9f9b-44fe-917e-4b8f8903fe48","Type":"ContainerDied","Data":"f4c8ae4668845d5419a42d5a2fff1d8443ff256365fd628f048624caf2cccb66"} Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.054313 4763 scope.go:117] "RemoveContainer" containerID="a633c77ac961ec6e2ebffa3f109c537f5abef5ebfa1d2693ecd21e5983b9375a" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.054259 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.059870 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" event={"ID":"3af3ecac-eec4-49da-9fd7-14f74af80acf","Type":"ContainerDied","Data":"03457159cd72c09e00d251320119333757a7a75937495d55a61c74e598c3b7d2"} Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.059919 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c6986c5b5-zdhnl" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.065070 4763 generic.go:334] "Generic (PLEG): container finished" podID="7731d4cb-7569-4783-842d-acef9e33cb50" containerID="e45544d0106c1c0fa6adcc91cf5d8dad3edef8158b0994d69d0be1849fa3d2a0" exitCode=1 Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.065120 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerDied","Data":"e45544d0106c1c0fa6adcc91cf5d8dad3edef8158b0994d69d0be1849fa3d2a0"} Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.066537 4763 scope.go:117] "RemoveContainer" containerID="e45544d0106c1c0fa6adcc91cf5d8dad3edef8158b0994d69d0be1849fa3d2a0" Dec 06 08:30:53 crc kubenswrapper[4763]: E1206 08:30:53.066830 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(7731d4cb-7569-4783-842d-acef9e33cb50)\"" pod="openstack/watcher-decision-engine-0" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.070789 4763 generic.go:334] "Generic (PLEG): container finished" podID="432569c2-b7db-4f70-80ba-80817d206847" containerID="172ad6cb4a74ed844abba93af0f99c6c1d603ab40f0ad451c1ac05210cb0c2b8" exitCode=0 Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.071360 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d9d9cc79d-g6nvn" event={"ID":"432569c2-b7db-4f70-80ba-80817d206847","Type":"ContainerDied","Data":"172ad6cb4a74ed844abba93af0f99c6c1d603ab40f0ad451c1ac05210cb0c2b8"} Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.081620 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3af3ecac-eec4-49da-9fd7-14f74af80acf" (UID: "3af3ecac-eec4-49da-9fd7-14f74af80acf"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.106844 4763 scope.go:117] "RemoveContainer" containerID="d756452bf5874c1028438a8e0c33e197f75838a921c789ce1158af9efcc7c093" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.114635 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.118734 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3af3ecac-eec4-49da-9fd7-14f74af80acf" (UID: "3af3ecac-eec4-49da-9fd7-14f74af80acf"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.137429 4763 scope.go:117] "RemoveContainer" containerID="c98b0237102d24fdeb6b9c08bbe03732b31010422b5cda02ae05f2c961e9de50" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.164235 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3af3ecac-eec4-49da-9fd7-14f74af80acf" (UID: "3af3ecac-eec4-49da-9fd7-14f74af80acf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.174591 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3af3ecac-eec4-49da-9fd7-14f74af80acf" (UID: "3af3ecac-eec4-49da-9fd7-14f74af80acf"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.199244 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "036fdcea-9f9b-44fe-917e-4b8f8903fe48" (UID: "036fdcea-9f9b-44fe-917e-4b8f8903fe48"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.209356 4763 scope.go:117] "RemoveContainer" containerID="0020985e174ca9d052a7b02fa6c2b67bd9b122c6106e8acf26a9cb35d7098a08" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.217612 4763 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.217659 4763 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.217673 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.217683 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3af3ecac-eec4-49da-9fd7-14f74af80acf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.262096 4763 scope.go:117] "RemoveContainer" containerID="32c98b55c1b90e666d78cb2315e11a2b37799d59d48548b26b1801a52e6f54f3" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.272560 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "036fdcea-9f9b-44fe-917e-4b8f8903fe48" (UID: "036fdcea-9f9b-44fe-917e-4b8f8903fe48"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.310885 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-config-data" (OuterVolumeSpecName: "config-data") pod "036fdcea-9f9b-44fe-917e-4b8f8903fe48" (UID: "036fdcea-9f9b-44fe-917e-4b8f8903fe48"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.311355 4763 scope.go:117] "RemoveContainer" containerID="d5aad9eb2018ea0e78fd14c0a47cabca0b684b24712fb3bcf03ee1edd83fd9d5" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.320124 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.320161 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/036fdcea-9f9b-44fe-917e-4b8f8903fe48-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.404391 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.420658 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.459853 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:30:53 crc kubenswrapper[4763]: E1206 08:30:53.460590 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c55cdaa-1dd9-4c6e-937e-da63410a649d" containerName="dnsmasq-dns" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.460607 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c55cdaa-1dd9-4c6e-937e-da63410a649d" containerName="dnsmasq-dns" Dec 06 08:30:53 crc kubenswrapper[4763]: E1206 08:30:53.460623 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="proxy-httpd" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.460630 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="proxy-httpd" Dec 06 08:30:53 crc kubenswrapper[4763]: E1206 08:30:53.460659 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c55cdaa-1dd9-4c6e-937e-da63410a649d" containerName="init" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.460666 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c55cdaa-1dd9-4c6e-937e-da63410a649d" containerName="init" Dec 06 08:30:53 crc kubenswrapper[4763]: E1206 08:30:53.460694 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="ceilometer-notification-agent" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.460702 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="ceilometer-notification-agent" Dec 06 08:30:53 crc kubenswrapper[4763]: E1206 08:30:53.460711 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3af3ecac-eec4-49da-9fd7-14f74af80acf" containerName="init" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.460718 4763 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3af3ecac-eec4-49da-9fd7-14f74af80acf" containerName="init" Dec 06 08:30:53 crc kubenswrapper[4763]: E1206 08:30:53.460739 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="ceilometer-central-agent" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.460745 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="ceilometer-central-agent" Dec 06 08:30:53 crc kubenswrapper[4763]: E1206 08:30:53.460765 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="sg-core" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.460771 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="sg-core" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.461147 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="ceilometer-central-agent" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.461178 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="3af3ecac-eec4-49da-9fd7-14f74af80acf" containerName="init" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.461196 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="proxy-httpd" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.461212 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="sg-core" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.461228 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c55cdaa-1dd9-4c6e-937e-da63410a649d" containerName="dnsmasq-dns" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.461245 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" containerName="ceilometer-notification-agent" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.464258 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.474274 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.476103 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.519285 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c6986c5b5-zdhnl"] Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.528321 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c6986c5b5-zdhnl"] Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.534203 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.534253 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwp6t\" (UniqueName: \"kubernetes.io/projected/816a5c93-44e9-4ab0-90f1-95fac48302bc-kube-api-access-lwp6t\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.534328 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-run-httpd\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.534359 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-log-httpd\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.534375 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-scripts\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.534397 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-config-data\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.534416 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.539970 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.636286 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-log-httpd\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.636336 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-scripts\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.636375 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-config-data\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.636406 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.636491 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.636535 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwp6t\" (UniqueName: \"kubernetes.io/projected/816a5c93-44e9-4ab0-90f1-95fac48302bc-kube-api-access-lwp6t\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.636664 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-run-httpd\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.637406 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-run-httpd\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.637693 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-log-httpd\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.642477 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-scripts\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.642879 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-config-data\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.644804 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.644979 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.660651 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwp6t\" (UniqueName: \"kubernetes.io/projected/816a5c93-44e9-4ab0-90f1-95fac48302bc-kube-api-access-lwp6t\") pod \"ceilometer-0\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " pod="openstack/ceilometer-0" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.742251 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="036fdcea-9f9b-44fe-917e-4b8f8903fe48" path="/var/lib/kubelet/pods/036fdcea-9f9b-44fe-917e-4b8f8903fe48/volumes" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.743383 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3af3ecac-eec4-49da-9fd7-14f74af80acf" path="/var/lib/kubelet/pods/3af3ecac-eec4-49da-9fd7-14f74af80acf/volumes" Dec 06 08:30:53 crc kubenswrapper[4763]: I1206 08:30:53.812014 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.093295 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" event={"ID":"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6","Type":"ContainerStarted","Data":"d0dae6944fad7c189279b271ed27c53edaea5e4870f3da8905877e7d006709f9"} Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.094039 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" event={"ID":"5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6","Type":"ContainerStarted","Data":"f077351da53a2a5a79bfb144e8931809faabe1f422842e18aa74043efd5dd531"} Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.109862 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7368d303-c809-421b-a5a8-32d16c00a1f7","Type":"ContainerStarted","Data":"f482d7204d084ee4978cfdc0bbd59a6fafcae641d196a494b590c2272a22ba98"} Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.112146 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8549689586-h89zw" event={"ID":"73a66c39-800c-426e-a24b-a95a37280ebd","Type":"ContainerStarted","Data":"e56304b5189041c9cff46fbd945878dcc6f4722cd54097f3c6983028813a2ad2"} Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.112186 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8549689586-h89zw" event={"ID":"73a66c39-800c-426e-a24b-a95a37280ebd","Type":"ContainerStarted","Data":"37fc4f3f5a4f1b7fe87d4f718a6cf8683aab30591db94801119d5c98c61436b2"} Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.132143 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="0d7564a7-563b-416e-b223-fe69473a041d" containerName="cinder-api-log" containerID="cri-o://5fecf6466e0030860e5da8a01d400422b61c12f85723aef7d1d2bdf27836a5bd" gracePeriod=30 Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.132308 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d7564a7-563b-416e-b223-fe69473a041d","Type":"ContainerStarted","Data":"69c8117d016f3c5b76ec6f7f715d7dbef50443bd150be54a5b1d87e801eaf5ec"} Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.132356 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.132380 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="0d7564a7-563b-416e-b223-fe69473a041d" containerName="cinder-api" containerID="cri-o://69c8117d016f3c5b76ec6f7f715d7dbef50443bd150be54a5b1d87e801eaf5ec" gracePeriod=30 Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.138672 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-c9b5f6d4f-l8lbt" podStartSLOduration=3.697366936 podStartE2EDuration="7.138646462s" podCreationTimestamp="2025-12-06 08:30:47 +0000 UTC" firstStartedPulling="2025-12-06 08:30:49.155561976 +0000 UTC m=+1131.731267014" lastFinishedPulling="2025-12-06 08:30:52.596841502 +0000 UTC m=+1135.172546540" observedRunningTime="2025-12-06 08:30:54.116509165 +0000 UTC m=+1136.692214203" watchObservedRunningTime="2025-12-06 08:30:54.138646462 +0000 UTC m=+1136.714351500" Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.171181 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-84c44f58df-s6697" event={"ID":"4c5b406a-e0c3-4856-a55a-58d3403994cc","Type":"ContainerStarted","Data":"45165ad41c822d9525863f060df2a2eba8ae327f7248300d8f6bcc59b3801870"} Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.172148 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.183785 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-8549689586-h89zw" podStartSLOduration=3.964571985 podStartE2EDuration="7.183762477s" podCreationTimestamp="2025-12-06 08:30:47 +0000 UTC" firstStartedPulling="2025-12-06 08:30:49.392645164 +0000 UTC m=+1131.968350192" lastFinishedPulling="2025-12-06 08:30:52.611835646 +0000 UTC m=+1135.187540684" observedRunningTime="2025-12-06 08:30:54.158425444 +0000 UTC m=+1136.734130492" watchObservedRunningTime="2025-12-06 08:30:54.183762477 +0000 UTC m=+1136.759467515" Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.219888 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84c44f58df-s6697" podStartSLOduration=6.21986156 podStartE2EDuration="6.21986156s" podCreationTimestamp="2025-12-06 08:30:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:54.217221278 +0000 UTC m=+1136.792926316" watchObservedRunningTime="2025-12-06 08:30:54.21986156 +0000 UTC m=+1136.795566598" Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.232357 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.232334126 podStartE2EDuration="6.232334126s" podCreationTimestamp="2025-12-06 08:30:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:54.189161453 +0000 UTC m=+1136.764866491" watchObservedRunningTime="2025-12-06 08:30:54.232334126 +0000 UTC m=+1136.808039164" Dec 06 08:30:54 crc kubenswrapper[4763]: I1206 08:30:54.425742 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.182302 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"816a5c93-44e9-4ab0-90f1-95fac48302bc","Type":"ContainerStarted","Data":"27d42574155cba03a1dd97cd82d71c76c15545158f689875073b3402242214e3"} Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.182591 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"816a5c93-44e9-4ab0-90f1-95fac48302bc","Type":"ContainerStarted","Data":"0c3b58f8789d26003d1d83ce7b1b09b5c346ef7449e436d9cdffef03aba4ec38"} Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.182609 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"816a5c93-44e9-4ab0-90f1-95fac48302bc","Type":"ContainerStarted","Data":"e71c988b4945ef3f0d5bd168e5ccf6db8c946a50c24e192cda73c3861e52e36a"} Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.184869 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7368d303-c809-421b-a5a8-32d16c00a1f7","Type":"ContainerStarted","Data":"76f683aed32b9dedb0e9d0486288f98dc1ca6aad3ec7894a363080799df941f7"} Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.187587 4763 
generic.go:334] "Generic (PLEG): container finished" podID="0d7564a7-563b-416e-b223-fe69473a041d" containerID="5fecf6466e0030860e5da8a01d400422b61c12f85723aef7d1d2bdf27836a5bd" exitCode=143 Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.188340 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d7564a7-563b-416e-b223-fe69473a041d","Type":"ContainerDied","Data":"5fecf6466e0030860e5da8a01d400422b61c12f85723aef7d1d2bdf27836a5bd"} Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.212050 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=6.573320524 podStartE2EDuration="7.212013081s" podCreationTimestamp="2025-12-06 08:30:48 +0000 UTC" firstStartedPulling="2025-12-06 08:30:50.094578316 +0000 UTC m=+1132.670283354" lastFinishedPulling="2025-12-06 08:30:50.733270873 +0000 UTC m=+1133.308975911" observedRunningTime="2025-12-06 08:30:55.203248764 +0000 UTC m=+1137.778953822" watchObservedRunningTime="2025-12-06 08:30:55.212013081 +0000 UTC m=+1137.787718119" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.494167 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6d9d9cc79d-g6nvn" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.158:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.158:8443: connect: connection refused" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.542253 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5fcff8587b-xnnm8"] Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.547369 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.551381 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.553224 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.568799 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5fcff8587b-xnnm8"] Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.702726 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-config-data-custom\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.702775 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-internal-tls-certs\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.702823 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15432443-8cf1-463b-bc66-1995d774b839-logs\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " 
pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.703031 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-combined-ca-bundle\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.703199 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r5m9\" (UniqueName: \"kubernetes.io/projected/15432443-8cf1-463b-bc66-1995d774b839-kube-api-access-6r5m9\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.703297 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-config-data\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.703327 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-public-tls-certs\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.807212 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r5m9\" (UniqueName: \"kubernetes.io/projected/15432443-8cf1-463b-bc66-1995d774b839-kube-api-access-6r5m9\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.807345 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-config-data\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.807367 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-public-tls-certs\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.807498 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-config-data-custom\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.807526 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-internal-tls-certs\") pod 
\"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.807567 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15432443-8cf1-463b-bc66-1995d774b839-logs\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.807607 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-combined-ca-bundle\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.809692 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15432443-8cf1-463b-bc66-1995d774b839-logs\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.848950 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-internal-tls-certs\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.849863 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-combined-ca-bundle\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.849930 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-config-data\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.857355 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-public-tls-certs\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.870043 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15432443-8cf1-463b-bc66-1995d774b839-config-data-custom\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.872485 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r5m9\" (UniqueName: \"kubernetes.io/projected/15432443-8cf1-463b-bc66-1995d774b839-kube-api-access-6r5m9\") pod \"barbican-api-5fcff8587b-xnnm8\" (UID: \"15432443-8cf1-463b-bc66-1995d774b839\") " 
pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:55 crc kubenswrapper[4763]: I1206 08:30:55.876285 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:56 crc kubenswrapper[4763]: I1206 08:30:56.213787 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"816a5c93-44e9-4ab0-90f1-95fac48302bc","Type":"ContainerStarted","Data":"29915af9ea203e520ed16bce754a9b8ad29c8c523feb9c5bb26dd45aa9051a1e"} Dec 06 08:30:56 crc kubenswrapper[4763]: I1206 08:30:56.421134 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5fcff8587b-xnnm8"] Dec 06 08:30:56 crc kubenswrapper[4763]: I1206 08:30:56.527802 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:30:56 crc kubenswrapper[4763]: I1206 08:30:56.527847 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:30:56 crc kubenswrapper[4763]: I1206 08:30:56.528330 4763 scope.go:117] "RemoveContainer" containerID="e45544d0106c1c0fa6adcc91cf5d8dad3edef8158b0994d69d0be1849fa3d2a0" Dec 06 08:30:56 crc kubenswrapper[4763]: E1206 08:30:56.528625 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(7731d4cb-7569-4783-842d-acef9e33cb50)\"" pod="openstack/watcher-decision-engine-0" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" Dec 06 08:30:57 crc kubenswrapper[4763]: I1206 08:30:57.224756 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5fcff8587b-xnnm8" event={"ID":"15432443-8cf1-463b-bc66-1995d774b839","Type":"ContainerStarted","Data":"f44b77f38116910834da162109df5b71a0d841ec737b5e3760808e1063776327"} Dec 06 08:30:57 crc kubenswrapper[4763]: I1206 08:30:57.230291 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5fcff8587b-xnnm8" event={"ID":"15432443-8cf1-463b-bc66-1995d774b839","Type":"ContainerStarted","Data":"c104d7448a5e31a58995fde4d0266653bda4a8763382157ef1ff5fc65e26c67c"} Dec 06 08:30:57 crc kubenswrapper[4763]: I1206 08:30:57.230304 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5fcff8587b-xnnm8" event={"ID":"15432443-8cf1-463b-bc66-1995d774b839","Type":"ContainerStarted","Data":"a462cc36fbb6ca3ae60efc75615c6411d8cbf36150af5283344c9fe5ead70e02"} Dec 06 08:30:57 crc kubenswrapper[4763]: I1206 08:30:57.230318 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:57 crc kubenswrapper[4763]: I1206 08:30:57.230328 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:30:57 crc kubenswrapper[4763]: I1206 08:30:57.663328 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:30:57 crc kubenswrapper[4763]: I1206 08:30:57.702514 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5fcff8587b-xnnm8" podStartSLOduration=2.702495849 podStartE2EDuration="2.702495849s" podCreationTimestamp="2025-12-06 08:30:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:30:57.252105495 +0000 UTC m=+1139.827810533" watchObservedRunningTime="2025-12-06 08:30:57.702495849 +0000 UTC m=+1140.278200887" Dec 06 08:30:58 crc kubenswrapper[4763]: I1206 08:30:58.241972 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"816a5c93-44e9-4ab0-90f1-95fac48302bc","Type":"ContainerStarted","Data":"dce86d9ace68c426a9b745092305506124817985da993583637780ee632f0ccf"} Dec 06 08:30:58 crc kubenswrapper[4763]: I1206 08:30:58.285441 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.311387668 podStartE2EDuration="5.285420825s" podCreationTimestamp="2025-12-06 08:30:53 +0000 UTC" firstStartedPulling="2025-12-06 08:30:54.439092987 +0000 UTC m=+1137.014798025" lastFinishedPulling="2025-12-06 08:30:57.413126144 +0000 UTC m=+1139.988831182" observedRunningTime="2025-12-06 08:30:58.277334437 +0000 UTC m=+1140.853039475" watchObservedRunningTime="2025-12-06 08:30:58.285420825 +0000 UTC m=+1140.861125863" Dec 06 08:30:58 crc kubenswrapper[4763]: I1206 08:30:58.916232 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.044134 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.163299 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.177393 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c698b9485-rqx2w"] Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.177766 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" podUID="f2782736-123f-4570-a129-af3317738af3" containerName="dnsmasq-dns" containerID="cri-o://9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5" gracePeriod=10 Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.259867 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.330227 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.752909 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.912952 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-nb\") pod \"f2782736-123f-4570-a129-af3317738af3\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.913011 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-sb\") pod \"f2782736-123f-4570-a129-af3317738af3\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.913208 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4k62\" (UniqueName: \"kubernetes.io/projected/f2782736-123f-4570-a129-af3317738af3-kube-api-access-z4k62\") pod \"f2782736-123f-4570-a129-af3317738af3\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.913371 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-svc\") pod \"f2782736-123f-4570-a129-af3317738af3\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.913454 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-config\") pod \"f2782736-123f-4570-a129-af3317738af3\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.913517 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-swift-storage-0\") pod \"f2782736-123f-4570-a129-af3317738af3\" (UID: \"f2782736-123f-4570-a129-af3317738af3\") " Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.919022 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2782736-123f-4570-a129-af3317738af3-kube-api-access-z4k62" (OuterVolumeSpecName: "kube-api-access-z4k62") pod "f2782736-123f-4570-a129-af3317738af3" (UID: "f2782736-123f-4570-a129-af3317738af3"). InnerVolumeSpecName "kube-api-access-z4k62". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.973152 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-config" (OuterVolumeSpecName: "config") pod "f2782736-123f-4570-a129-af3317738af3" (UID: "f2782736-123f-4570-a129-af3317738af3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.979463 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f2782736-123f-4570-a129-af3317738af3" (UID: "f2782736-123f-4570-a129-af3317738af3"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.985155 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f2782736-123f-4570-a129-af3317738af3" (UID: "f2782736-123f-4570-a129-af3317738af3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.989438 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f2782736-123f-4570-a129-af3317738af3" (UID: "f2782736-123f-4570-a129-af3317738af3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:30:59 crc kubenswrapper[4763]: I1206 08:30:59.999431 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f2782736-123f-4570-a129-af3317738af3" (UID: "f2782736-123f-4570-a129-af3317738af3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.017174 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4k62\" (UniqueName: \"kubernetes.io/projected/f2782736-123f-4570-a129-af3317738af3-kube-api-access-z4k62\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.017209 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.017218 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.017228 4763 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.017237 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.017244 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2782736-123f-4570-a129-af3317738af3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.053316 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-798696db5c-57lrg" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.127873 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-87d7c84fb-vhh22"] Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.128616 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-87d7c84fb-vhh22" podUID="13ce3a1e-5249-4600-8a07-343ad9b9c4f1" 
containerName="neutron-httpd" containerID="cri-o://84a2d8258f27cce4329ff171d4ee53f0481ce71a4b0e51e9d76954f3a9fe17b9" gracePeriod=30 Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.128171 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-87d7c84fb-vhh22" podUID="13ce3a1e-5249-4600-8a07-343ad9b9c4f1" containerName="neutron-api" containerID="cri-o://ec94c6fed6b7ebb37f96e9a6f6f313ecf88d2d351928a34200a528cdcbd90e04" gracePeriod=30 Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.269542 4763 generic.go:334] "Generic (PLEG): container finished" podID="f2782736-123f-4570-a129-af3317738af3" containerID="9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5" exitCode=0 Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.269574 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.269626 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" event={"ID":"f2782736-123f-4570-a129-af3317738af3","Type":"ContainerDied","Data":"9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5"} Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.269702 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c698b9485-rqx2w" event={"ID":"f2782736-123f-4570-a129-af3317738af3","Type":"ContainerDied","Data":"ba5fccb7334f4331737e81eed6f2b59c03c90c08491d00fd39d47839d3ad5459"} Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.269801 4763 scope.go:117] "RemoveContainer" containerID="9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.270325 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7368d303-c809-421b-a5a8-32d16c00a1f7" containerName="cinder-scheduler" containerID="cri-o://f482d7204d084ee4978cfdc0bbd59a6fafcae641d196a494b590c2272a22ba98" gracePeriod=30 Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.270408 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7368d303-c809-421b-a5a8-32d16c00a1f7" containerName="probe" containerID="cri-o://76f683aed32b9dedb0e9d0486288f98dc1ca6aad3ec7894a363080799df941f7" gracePeriod=30 Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.303699 4763 scope.go:117] "RemoveContainer" containerID="d29460ab58f2cc25254c71904016f300da244a410629d816d1ce6947d62cb829" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.305240 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c698b9485-rqx2w"] Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.313005 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c698b9485-rqx2w"] Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.347566 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.428841 4763 scope.go:117] "RemoveContainer" containerID="9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5" Dec 06 08:31:00 crc kubenswrapper[4763]: E1206 08:31:00.429278 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5\": container with ID starting with 9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5 not found: ID does not exist" containerID="9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.429324 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5"} err="failed to get container status \"9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5\": rpc error: code = NotFound desc = could not find container \"9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5\": container with ID starting with 9d5a0b98295547d051d21957e272033a9712a4ed786a257d9b31d59b34a04fc5 not found: ID does not exist" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.429349 4763 scope.go:117] "RemoveContainer" containerID="d29460ab58f2cc25254c71904016f300da244a410629d816d1ce6947d62cb829" Dec 06 08:31:00 crc kubenswrapper[4763]: E1206 08:31:00.429695 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d29460ab58f2cc25254c71904016f300da244a410629d816d1ce6947d62cb829\": container with ID starting with d29460ab58f2cc25254c71904016f300da244a410629d816d1ce6947d62cb829 not found: ID does not exist" containerID="d29460ab58f2cc25254c71904016f300da244a410629d816d1ce6947d62cb829" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.429719 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d29460ab58f2cc25254c71904016f300da244a410629d816d1ce6947d62cb829"} err="failed to get container status \"d29460ab58f2cc25254c71904016f300da244a410629d816d1ce6947d62cb829\": rpc error: code = NotFound desc = could not find container \"d29460ab58f2cc25254c71904016f300da244a410629d816d1ce6947d62cb829\": container with ID starting with d29460ab58f2cc25254c71904016f300da244a410629d816d1ce6947d62cb829 not found: ID does not exist" Dec 06 08:31:00 crc kubenswrapper[4763]: I1206 08:31:00.441539 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:31:01 crc kubenswrapper[4763]: I1206 08:31:01.282256 4763 generic.go:334] "Generic (PLEG): container finished" podID="7368d303-c809-421b-a5a8-32d16c00a1f7" containerID="76f683aed32b9dedb0e9d0486288f98dc1ca6aad3ec7894a363080799df941f7" exitCode=0 Dec 06 08:31:01 crc kubenswrapper[4763]: I1206 08:31:01.282330 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7368d303-c809-421b-a5a8-32d16c00a1f7","Type":"ContainerDied","Data":"76f683aed32b9dedb0e9d0486288f98dc1ca6aad3ec7894a363080799df941f7"} Dec 06 08:31:01 crc kubenswrapper[4763]: I1206 08:31:01.284614 4763 generic.go:334] "Generic (PLEG): container finished" podID="13ce3a1e-5249-4600-8a07-343ad9b9c4f1" containerID="84a2d8258f27cce4329ff171d4ee53f0481ce71a4b0e51e9d76954f3a9fe17b9" exitCode=0 Dec 06 08:31:01 crc kubenswrapper[4763]: I1206 08:31:01.284677 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-87d7c84fb-vhh22" event={"ID":"13ce3a1e-5249-4600-8a07-343ad9b9c4f1","Type":"ContainerDied","Data":"84a2d8258f27cce4329ff171d4ee53f0481ce71a4b0e51e9d76954f3a9fe17b9"} Dec 06 08:31:01 crc kubenswrapper[4763]: I1206 08:31:01.644850 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/cinder-api-0" Dec 06 08:31:01 crc kubenswrapper[4763]: I1206 08:31:01.729917 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2782736-123f-4570-a129-af3317738af3" path="/var/lib/kubelet/pods/f2782736-123f-4570-a129-af3317738af3/volumes" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.298416 4763 generic.go:334] "Generic (PLEG): container finished" podID="7368d303-c809-421b-a5a8-32d16c00a1f7" containerID="f482d7204d084ee4978cfdc0bbd59a6fafcae641d196a494b590c2272a22ba98" exitCode=0 Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.298448 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7368d303-c809-421b-a5a8-32d16c00a1f7","Type":"ContainerDied","Data":"f482d7204d084ee4978cfdc0bbd59a6fafcae641d196a494b590c2272a22ba98"} Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.410387 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.563681 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data-custom\") pod \"7368d303-c809-421b-a5a8-32d16c00a1f7\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.564304 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7368d303-c809-421b-a5a8-32d16c00a1f7-etc-machine-id\") pod \"7368d303-c809-421b-a5a8-32d16c00a1f7\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.564450 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-combined-ca-bundle\") pod \"7368d303-c809-421b-a5a8-32d16c00a1f7\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.564453 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7368d303-c809-421b-a5a8-32d16c00a1f7-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7368d303-c809-421b-a5a8-32d16c00a1f7" (UID: "7368d303-c809-421b-a5a8-32d16c00a1f7"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.564731 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data\") pod \"7368d303-c809-421b-a5a8-32d16c00a1f7\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.564867 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-942vb\" (UniqueName: \"kubernetes.io/projected/7368d303-c809-421b-a5a8-32d16c00a1f7-kube-api-access-942vb\") pod \"7368d303-c809-421b-a5a8-32d16c00a1f7\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.564993 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-scripts\") pod \"7368d303-c809-421b-a5a8-32d16c00a1f7\" (UID: \"7368d303-c809-421b-a5a8-32d16c00a1f7\") " Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.565627 4763 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7368d303-c809-421b-a5a8-32d16c00a1f7-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.581716 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-scripts" (OuterVolumeSpecName: "scripts") pod "7368d303-c809-421b-a5a8-32d16c00a1f7" (UID: "7368d303-c809-421b-a5a8-32d16c00a1f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.582041 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7368d303-c809-421b-a5a8-32d16c00a1f7" (UID: "7368d303-c809-421b-a5a8-32d16c00a1f7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.582204 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7368d303-c809-421b-a5a8-32d16c00a1f7-kube-api-access-942vb" (OuterVolumeSpecName: "kube-api-access-942vb") pod "7368d303-c809-421b-a5a8-32d16c00a1f7" (UID: "7368d303-c809-421b-a5a8-32d16c00a1f7"). InnerVolumeSpecName "kube-api-access-942vb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.653109 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7368d303-c809-421b-a5a8-32d16c00a1f7" (UID: "7368d303-c809-421b-a5a8-32d16c00a1f7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.670996 4763 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.671058 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.671072 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-942vb\" (UniqueName: \"kubernetes.io/projected/7368d303-c809-421b-a5a8-32d16c00a1f7-kube-api-access-942vb\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.671092 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.700375 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data" (OuterVolumeSpecName: "config-data") pod "7368d303-c809-421b-a5a8-32d16c00a1f7" (UID: "7368d303-c809-421b-a5a8-32d16c00a1f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:02 crc kubenswrapper[4763]: I1206 08:31:02.774152 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7368d303-c809-421b-a5a8-32d16c00a1f7-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.328208 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7368d303-c809-421b-a5a8-32d16c00a1f7","Type":"ContainerDied","Data":"85d6612067e7783ed59903b14324ba6857828fa2a145869a7a4ac9a8d90107f1"} Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.328293 4763 scope.go:117] "RemoveContainer" containerID="76f683aed32b9dedb0e9d0486288f98dc1ca6aad3ec7894a363080799df941f7" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.328448 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.383379 4763 scope.go:117] "RemoveContainer" containerID="f482d7204d084ee4978cfdc0bbd59a6fafcae641d196a494b590c2272a22ba98" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.383400 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.409392 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.420156 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 06 08:31:03 crc kubenswrapper[4763]: E1206 08:31:03.420643 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2782736-123f-4570-a129-af3317738af3" containerName="dnsmasq-dns" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.420669 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2782736-123f-4570-a129-af3317738af3" containerName="dnsmasq-dns" Dec 06 08:31:03 crc kubenswrapper[4763]: E1206 08:31:03.420684 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2782736-123f-4570-a129-af3317738af3" containerName="init" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.420691 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2782736-123f-4570-a129-af3317738af3" containerName="init" Dec 06 08:31:03 crc kubenswrapper[4763]: E1206 08:31:03.420719 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7368d303-c809-421b-a5a8-32d16c00a1f7" containerName="cinder-scheduler" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.420728 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7368d303-c809-421b-a5a8-32d16c00a1f7" containerName="cinder-scheduler" Dec 06 08:31:03 crc kubenswrapper[4763]: E1206 08:31:03.420751 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7368d303-c809-421b-a5a8-32d16c00a1f7" containerName="probe" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.420759 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7368d303-c809-421b-a5a8-32d16c00a1f7" containerName="probe" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.421018 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7368d303-c809-421b-a5a8-32d16c00a1f7" containerName="cinder-scheduler" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.421050 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2782736-123f-4570-a129-af3317738af3" containerName="dnsmasq-dns" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.421068 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7368d303-c809-421b-a5a8-32d16c00a1f7" containerName="probe" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.422169 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.426601 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.432181 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.589831 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.589909 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.589948 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.589995 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-config-data\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.590020 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-scripts\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.590082 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79jrv\" (UniqueName: \"kubernetes.io/projected/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-kube-api-access-79jrv\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.692860 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.692986 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.693027 4763 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.693079 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-config-data\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.693108 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-scripts\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.693146 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.693170 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79jrv\" (UniqueName: \"kubernetes.io/projected/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-kube-api-access-79jrv\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.698009 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-scripts\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.698287 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.704800 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-config-data\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.705269 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.715294 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79jrv\" (UniqueName: \"kubernetes.io/projected/bf9c2023-e5d0-4ad4-975d-ef654ff41dfb-kube-api-access-79jrv\") pod \"cinder-scheduler-0\" (UID: \"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb\") " pod="openstack/cinder-scheduler-0" Dec 06 08:31:03 crc 
kubenswrapper[4763]: I1206 08:31:03.742847 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7368d303-c809-421b-a5a8-32d16c00a1f7" path="/var/lib/kubelet/pods/7368d303-c809-421b-a5a8-32d16c00a1f7/volumes" Dec 06 08:31:03 crc kubenswrapper[4763]: I1206 08:31:03.745415 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 06 08:31:04 crc kubenswrapper[4763]: I1206 08:31:04.225328 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 06 08:31:04 crc kubenswrapper[4763]: W1206 08:31:04.230358 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf9c2023_e5d0_4ad4_975d_ef654ff41dfb.slice/crio-c03105ba8283de95249a8c58e080823649c7aebf5ed80413df92fdc345e35946 WatchSource:0}: Error finding container c03105ba8283de95249a8c58e080823649c7aebf5ed80413df92fdc345e35946: Status 404 returned error can't find the container with id c03105ba8283de95249a8c58e080823649c7aebf5ed80413df92fdc345e35946 Dec 06 08:31:04 crc kubenswrapper[4763]: I1206 08:31:04.342978 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb","Type":"ContainerStarted","Data":"c03105ba8283de95249a8c58e080823649c7aebf5ed80413df92fdc345e35946"} Dec 06 08:31:05 crc kubenswrapper[4763]: I1206 08:31:05.365776 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb","Type":"ContainerStarted","Data":"3a6c3696ab248733ae1899b0f2bddd5a2cc8681a4f8c95956fd3093f3bb759e8"} Dec 06 08:31:05 crc kubenswrapper[4763]: I1206 08:31:05.495020 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6d9d9cc79d-g6nvn" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.158:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.158:8443: connect: connection refused" Dec 06 08:31:06 crc kubenswrapper[4763]: I1206 08:31:06.086477 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5fd795fc6-gh6s9" Dec 06 08:31:06 crc kubenswrapper[4763]: I1206 08:31:06.378071 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bf9c2023-e5d0-4ad4-975d-ef654ff41dfb","Type":"ContainerStarted","Data":"c786dababeb6f794d8c100288012f3ba4ba65614a18c30264ee9384d02820a88"} Dec 06 08:31:06 crc kubenswrapper[4763]: I1206 08:31:06.400491 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.400458232 podStartE2EDuration="3.400458232s" podCreationTimestamp="2025-12-06 08:31:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:31:06.395059696 +0000 UTC m=+1148.970764734" watchObservedRunningTime="2025-12-06 08:31:06.400458232 +0000 UTC m=+1148.976163270" Dec 06 08:31:06 crc kubenswrapper[4763]: I1206 08:31:06.720337 4763 scope.go:117] "RemoveContainer" containerID="e45544d0106c1c0fa6adcc91cf5d8dad3edef8158b0994d69d0be1849fa3d2a0" Dec 06 08:31:06 crc kubenswrapper[4763]: E1206 08:31:06.720962 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s 
restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(7731d4cb-7569-4783-842d-acef9e33cb50)\"" pod="openstack/watcher-decision-engine-0" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.403869 4763 generic.go:334] "Generic (PLEG): container finished" podID="13ce3a1e-5249-4600-8a07-343ad9b9c4f1" containerID="ec94c6fed6b7ebb37f96e9a6f6f313ecf88d2d351928a34200a528cdcbd90e04" exitCode=0 Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.405067 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-87d7c84fb-vhh22" event={"ID":"13ce3a1e-5249-4600-8a07-343ad9b9c4f1","Type":"ContainerDied","Data":"ec94c6fed6b7ebb37f96e9a6f6f313ecf88d2d351928a34200a528cdcbd90e04"} Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.553622 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.619524 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.646993 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5fcff8587b-xnnm8" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.749189 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6dfb477846-mf82l"] Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.749584 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6dfb477846-mf82l" podUID="f0807adf-d05a-4d39-a18f-758c54015885" containerName="barbican-api-log" containerID="cri-o://d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6" gracePeriod=30 Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.749918 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6dfb477846-mf82l" podUID="f0807adf-d05a-4d39-a18f-758c54015885" containerName="barbican-api" containerID="cri-o://58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174" gracePeriod=30 Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.775506 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-ovndb-tls-certs\") pod \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.775690 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvs7v\" (UniqueName: \"kubernetes.io/projected/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-kube-api-access-gvs7v\") pod \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.775768 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-combined-ca-bundle\") pod \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.775811 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-config\") pod 
\"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.775829 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-httpd-config\") pod \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\" (UID: \"13ce3a1e-5249-4600-8a07-343ad9b9c4f1\") " Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.786271 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-kube-api-access-gvs7v" (OuterVolumeSpecName: "kube-api-access-gvs7v") pod "13ce3a1e-5249-4600-8a07-343ad9b9c4f1" (UID: "13ce3a1e-5249-4600-8a07-343ad9b9c4f1"). InnerVolumeSpecName "kube-api-access-gvs7v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.788404 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "13ce3a1e-5249-4600-8a07-343ad9b9c4f1" (UID: "13ce3a1e-5249-4600-8a07-343ad9b9c4f1"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.854825 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13ce3a1e-5249-4600-8a07-343ad9b9c4f1" (UID: "13ce3a1e-5249-4600-8a07-343ad9b9c4f1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.878013 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvs7v\" (UniqueName: \"kubernetes.io/projected/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-kube-api-access-gvs7v\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.878044 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.878054 4763 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.887805 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-config" (OuterVolumeSpecName: "config") pod "13ce3a1e-5249-4600-8a07-343ad9b9c4f1" (UID: "13ce3a1e-5249-4600-8a07-343ad9b9c4f1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.890076 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "13ce3a1e-5249-4600-8a07-343ad9b9c4f1" (UID: "13ce3a1e-5249-4600-8a07-343ad9b9c4f1"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.981218 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:07 crc kubenswrapper[4763]: I1206 08:31:07.981418 4763 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13ce3a1e-5249-4600-8a07-343ad9b9c4f1-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:08 crc kubenswrapper[4763]: I1206 08:31:08.206846 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:31:08 crc kubenswrapper[4763]: I1206 08:31:08.416461 4763 generic.go:334] "Generic (PLEG): container finished" podID="f0807adf-d05a-4d39-a18f-758c54015885" containerID="d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6" exitCode=143 Dec 06 08:31:08 crc kubenswrapper[4763]: I1206 08:31:08.416545 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6dfb477846-mf82l" event={"ID":"f0807adf-d05a-4d39-a18f-758c54015885","Type":"ContainerDied","Data":"d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6"} Dec 06 08:31:08 crc kubenswrapper[4763]: I1206 08:31:08.419035 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-87d7c84fb-vhh22" Dec 06 08:31:08 crc kubenswrapper[4763]: I1206 08:31:08.419036 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-87d7c84fb-vhh22" event={"ID":"13ce3a1e-5249-4600-8a07-343ad9b9c4f1","Type":"ContainerDied","Data":"57afeacb5f6333eec9ac2843dbcab227bc4943b5575951c20face82ad6fd36a3"} Dec 06 08:31:08 crc kubenswrapper[4763]: I1206 08:31:08.419123 4763 scope.go:117] "RemoveContainer" containerID="84a2d8258f27cce4329ff171d4ee53f0481ce71a4b0e51e9d76954f3a9fe17b9" Dec 06 08:31:08 crc kubenswrapper[4763]: I1206 08:31:08.432477 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-59bf5cd876-p79rt" Dec 06 08:31:08 crc kubenswrapper[4763]: I1206 08:31:08.450756 4763 scope.go:117] "RemoveContainer" containerID="ec94c6fed6b7ebb37f96e9a6f6f313ecf88d2d351928a34200a528cdcbd90e04" Dec 06 08:31:08 crc kubenswrapper[4763]: I1206 08:31:08.528105 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-87d7c84fb-vhh22"] Dec 06 08:31:08 crc kubenswrapper[4763]: I1206 08:31:08.551270 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-87d7c84fb-vhh22"] Dec 06 08:31:08 crc kubenswrapper[4763]: I1206 08:31:08.746237 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.077527 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 06 08:31:09 crc kubenswrapper[4763]: E1206 08:31:09.077923 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ce3a1e-5249-4600-8a07-343ad9b9c4f1" containerName="neutron-api" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.077939 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="13ce3a1e-5249-4600-8a07-343ad9b9c4f1" containerName="neutron-api" Dec 06 08:31:09 crc kubenswrapper[4763]: E1206 08:31:09.077974 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ce3a1e-5249-4600-8a07-343ad9b9c4f1" 
containerName="neutron-httpd" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.077981 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="13ce3a1e-5249-4600-8a07-343ad9b9c4f1" containerName="neutron-httpd" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.078153 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="13ce3a1e-5249-4600-8a07-343ad9b9c4f1" containerName="neutron-api" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.078181 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="13ce3a1e-5249-4600-8a07-343ad9b9c4f1" containerName="neutron-httpd" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.078779 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.081290 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-wqc86" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.081558 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.081622 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.102761 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.222582 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b185411-8ba8-4524-8f1d-e7f69f87dc05-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.222673 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1b185411-8ba8-4524-8f1d-e7f69f87dc05-openstack-config\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.222709 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1b185411-8ba8-4524-8f1d-e7f69f87dc05-openstack-config-secret\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.222808 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx78t\" (UniqueName: \"kubernetes.io/projected/1b185411-8ba8-4524-8f1d-e7f69f87dc05-kube-api-access-lx78t\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.324584 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx78t\" (UniqueName: \"kubernetes.io/projected/1b185411-8ba8-4524-8f1d-e7f69f87dc05-kube-api-access-lx78t\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.324660 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b185411-8ba8-4524-8f1d-e7f69f87dc05-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.324745 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1b185411-8ba8-4524-8f1d-e7f69f87dc05-openstack-config\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.324783 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1b185411-8ba8-4524-8f1d-e7f69f87dc05-openstack-config-secret\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.325711 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1b185411-8ba8-4524-8f1d-e7f69f87dc05-openstack-config\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.329124 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b185411-8ba8-4524-8f1d-e7f69f87dc05-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.335422 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1b185411-8ba8-4524-8f1d-e7f69f87dc05-openstack-config-secret\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.348529 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx78t\" (UniqueName: \"kubernetes.io/projected/1b185411-8ba8-4524-8f1d-e7f69f87dc05-kube-api-access-lx78t\") pod \"openstackclient\" (UID: \"1b185411-8ba8-4524-8f1d-e7f69f87dc05\") " pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.373066 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6dfb477846-mf82l" podUID="f0807adf-d05a-4d39-a18f-758c54015885" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.178:9311/healthcheck\": read tcp 10.217.0.2:53760->10.217.0.178:9311: read: connection reset by peer" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.373191 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6dfb477846-mf82l" podUID="f0807adf-d05a-4d39-a18f-758c54015885" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.178:9311/healthcheck\": read tcp 10.217.0.2:53762->10.217.0.178:9311: read: connection reset by peer" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.409709 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.740650 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13ce3a1e-5249-4600-8a07-343ad9b9c4f1" path="/var/lib/kubelet/pods/13ce3a1e-5249-4600-8a07-343ad9b9c4f1/volumes" Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.912569 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 06 08:31:09 crc kubenswrapper[4763]: I1206 08:31:09.947578 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.142868 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnbfx\" (UniqueName: \"kubernetes.io/projected/f0807adf-d05a-4d39-a18f-758c54015885-kube-api-access-jnbfx\") pod \"f0807adf-d05a-4d39-a18f-758c54015885\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.142989 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data\") pod \"f0807adf-d05a-4d39-a18f-758c54015885\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.143019 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-combined-ca-bundle\") pod \"f0807adf-d05a-4d39-a18f-758c54015885\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.143856 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0807adf-d05a-4d39-a18f-758c54015885-logs\") pod \"f0807adf-d05a-4d39-a18f-758c54015885\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.144030 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data-custom\") pod \"f0807adf-d05a-4d39-a18f-758c54015885\" (UID: \"f0807adf-d05a-4d39-a18f-758c54015885\") " Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.144510 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0807adf-d05a-4d39-a18f-758c54015885-logs" (OuterVolumeSpecName: "logs") pod "f0807adf-d05a-4d39-a18f-758c54015885" (UID: "f0807adf-d05a-4d39-a18f-758c54015885"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.149121 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0807adf-d05a-4d39-a18f-758c54015885-kube-api-access-jnbfx" (OuterVolumeSpecName: "kube-api-access-jnbfx") pod "f0807adf-d05a-4d39-a18f-758c54015885" (UID: "f0807adf-d05a-4d39-a18f-758c54015885"). InnerVolumeSpecName "kube-api-access-jnbfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.153385 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f0807adf-d05a-4d39-a18f-758c54015885" (UID: "f0807adf-d05a-4d39-a18f-758c54015885"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.191193 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0807adf-d05a-4d39-a18f-758c54015885" (UID: "f0807adf-d05a-4d39-a18f-758c54015885"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.210194 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data" (OuterVolumeSpecName: "config-data") pod "f0807adf-d05a-4d39-a18f-758c54015885" (UID: "f0807adf-d05a-4d39-a18f-758c54015885"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.247065 4763 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.247104 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnbfx\" (UniqueName: \"kubernetes.io/projected/f0807adf-d05a-4d39-a18f-758c54015885-kube-api-access-jnbfx\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.247119 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.247129 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0807adf-d05a-4d39-a18f-758c54015885-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.247143 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0807adf-d05a-4d39-a18f-758c54015885-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.441658 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1b185411-8ba8-4524-8f1d-e7f69f87dc05","Type":"ContainerStarted","Data":"f85948207eb4f20c4161d1b0a53a1b015b1e2320dc1d2e817bc0b90e18959f02"} Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.443416 4763 generic.go:334] "Generic (PLEG): container finished" podID="f0807adf-d05a-4d39-a18f-758c54015885" containerID="58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174" exitCode=0 Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.443444 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6dfb477846-mf82l" 
event={"ID":"f0807adf-d05a-4d39-a18f-758c54015885","Type":"ContainerDied","Data":"58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174"} Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.443461 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6dfb477846-mf82l" event={"ID":"f0807adf-d05a-4d39-a18f-758c54015885","Type":"ContainerDied","Data":"83dbb3504c7f7be62dd21708c1f5d4cd42cb4510bb0518c67d17c295f9eaffce"} Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.443476 4763 scope.go:117] "RemoveContainer" containerID="58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.443587 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6dfb477846-mf82l" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.476003 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6dfb477846-mf82l"] Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.484391 4763 scope.go:117] "RemoveContainer" containerID="d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.485732 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6dfb477846-mf82l"] Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.505376 4763 scope.go:117] "RemoveContainer" containerID="58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174" Dec 06 08:31:10 crc kubenswrapper[4763]: E1206 08:31:10.505867 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174\": container with ID starting with 58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174 not found: ID does not exist" containerID="58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.506073 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174"} err="failed to get container status \"58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174\": rpc error: code = NotFound desc = could not find container \"58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174\": container with ID starting with 58b0e5ffa960ac06eb76be0908d2e64cafe531f83ff8a8bc9018044f6aa7a174 not found: ID does not exist" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.506099 4763 scope.go:117] "RemoveContainer" containerID="d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6" Dec 06 08:31:10 crc kubenswrapper[4763]: E1206 08:31:10.506548 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6\": container with ID starting with d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6 not found: ID does not exist" containerID="d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6" Dec 06 08:31:10 crc kubenswrapper[4763]: I1206 08:31:10.506599 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6"} err="failed to get container status 
\"d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6\": rpc error: code = NotFound desc = could not find container \"d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6\": container with ID starting with d28ad1ac28c1002fcc73e9fd3de07829a26e98ad3952970f77b86e29662399f6 not found: ID does not exist" Dec 06 08:31:11 crc kubenswrapper[4763]: I1206 08:31:11.739692 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0807adf-d05a-4d39-a18f-758c54015885" path="/var/lib/kubelet/pods/f0807adf-d05a-4d39-a18f-758c54015885/volumes" Dec 06 08:31:12 crc kubenswrapper[4763]: I1206 08:31:12.537176 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:31:12 crc kubenswrapper[4763]: I1206 08:31:12.537503 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:31:14 crc kubenswrapper[4763]: I1206 08:31:14.083438 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.484454 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-77bbc88767-6qptg"] Dec 06 08:31:15 crc kubenswrapper[4763]: E1206 08:31:15.485337 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0807adf-d05a-4d39-a18f-758c54015885" containerName="barbican-api" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.485358 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0807adf-d05a-4d39-a18f-758c54015885" containerName="barbican-api" Dec 06 08:31:15 crc kubenswrapper[4763]: E1206 08:31:15.485426 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0807adf-d05a-4d39-a18f-758c54015885" containerName="barbican-api-log" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.485435 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0807adf-d05a-4d39-a18f-758c54015885" containerName="barbican-api-log" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.485663 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0807adf-d05a-4d39-a18f-758c54015885" containerName="barbican-api" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.485691 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0807adf-d05a-4d39-a18f-758c54015885" containerName="barbican-api-log" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.487014 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.492116 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.492199 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.492375 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.493755 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6d9d9cc79d-g6nvn" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.158:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.158:8443: connect: connection refused" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.493873 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.495186 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-77bbc88767-6qptg"] Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.541471 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-internal-tls-certs\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.541539 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-combined-ca-bundle\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.541557 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89364500-19a7-4b4f-aa5c-cf8730a63fdd-run-httpd\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.541602 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89364500-19a7-4b4f-aa5c-cf8730a63fdd-log-httpd\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.541622 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-public-tls-certs\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.541649 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zst2w\" (UniqueName: 
\"kubernetes.io/projected/89364500-19a7-4b4f-aa5c-cf8730a63fdd-kube-api-access-zst2w\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.541695 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-config-data\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.541718 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/89364500-19a7-4b4f-aa5c-cf8730a63fdd-etc-swift\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.643015 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zst2w\" (UniqueName: \"kubernetes.io/projected/89364500-19a7-4b4f-aa5c-cf8730a63fdd-kube-api-access-zst2w\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.643120 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-config-data\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.643173 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/89364500-19a7-4b4f-aa5c-cf8730a63fdd-etc-swift\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.643222 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-internal-tls-certs\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.643294 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-combined-ca-bundle\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.643321 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89364500-19a7-4b4f-aa5c-cf8730a63fdd-run-httpd\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.643392 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/89364500-19a7-4b4f-aa5c-cf8730a63fdd-log-httpd\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.643425 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-public-tls-certs\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.644014 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89364500-19a7-4b4f-aa5c-cf8730a63fdd-run-httpd\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.644161 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89364500-19a7-4b4f-aa5c-cf8730a63fdd-log-httpd\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.650050 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-public-tls-certs\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.652818 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-config-data\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.653362 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-internal-tls-certs\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.662307 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zst2w\" (UniqueName: \"kubernetes.io/projected/89364500-19a7-4b4f-aa5c-cf8730a63fdd-kube-api-access-zst2w\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.663229 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/89364500-19a7-4b4f-aa5c-cf8730a63fdd-etc-swift\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: \"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.664679 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89364500-19a7-4b4f-aa5c-cf8730a63fdd-combined-ca-bundle\") pod \"swift-proxy-77bbc88767-6qptg\" (UID: 
\"89364500-19a7-4b4f-aa5c-cf8730a63fdd\") " pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:15 crc kubenswrapper[4763]: I1206 08:31:15.823511 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:16 crc kubenswrapper[4763]: I1206 08:31:16.528773 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:31:16 crc kubenswrapper[4763]: I1206 08:31:16.529686 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 06 08:31:16 crc kubenswrapper[4763]: I1206 08:31:16.530004 4763 scope.go:117] "RemoveContainer" containerID="e45544d0106c1c0fa6adcc91cf5d8dad3edef8158b0994d69d0be1849fa3d2a0" Dec 06 08:31:17 crc kubenswrapper[4763]: I1206 08:31:17.316510 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:17 crc kubenswrapper[4763]: I1206 08:31:17.316790 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="ceilometer-central-agent" containerID="cri-o://0c3b58f8789d26003d1d83ce7b1b09b5c346ef7449e436d9cdffef03aba4ec38" gracePeriod=30 Dec 06 08:31:17 crc kubenswrapper[4763]: I1206 08:31:17.316933 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="proxy-httpd" containerID="cri-o://dce86d9ace68c426a9b745092305506124817985da993583637780ee632f0ccf" gracePeriod=30 Dec 06 08:31:17 crc kubenswrapper[4763]: I1206 08:31:17.316973 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="sg-core" containerID="cri-o://29915af9ea203e520ed16bce754a9b8ad29c8c523feb9c5bb26dd45aa9051a1e" gracePeriod=30 Dec 06 08:31:17 crc kubenswrapper[4763]: I1206 08:31:17.317003 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="ceilometer-notification-agent" containerID="cri-o://27d42574155cba03a1dd97cd82d71c76c15545158f689875073b3402242214e3" gracePeriod=30 Dec 06 08:31:17 crc kubenswrapper[4763]: I1206 08:31:17.324638 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 06 08:31:17 crc kubenswrapper[4763]: I1206 08:31:17.526793 4763 generic.go:334] "Generic (PLEG): container finished" podID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerID="29915af9ea203e520ed16bce754a9b8ad29c8c523feb9c5bb26dd45aa9051a1e" exitCode=2 Dec 06 08:31:17 crc kubenswrapper[4763]: I1206 08:31:17.527033 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"816a5c93-44e9-4ab0-90f1-95fac48302bc","Type":"ContainerDied","Data":"29915af9ea203e520ed16bce754a9b8ad29c8c523feb9c5bb26dd45aa9051a1e"} Dec 06 08:31:18 crc kubenswrapper[4763]: I1206 08:31:18.539567 4763 generic.go:334] "Generic (PLEG): container finished" podID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerID="dce86d9ace68c426a9b745092305506124817985da993583637780ee632f0ccf" exitCode=0 Dec 06 08:31:18 crc kubenswrapper[4763]: I1206 08:31:18.539602 4763 generic.go:334] "Generic (PLEG): container finished" podID="816a5c93-44e9-4ab0-90f1-95fac48302bc" 
containerID="0c3b58f8789d26003d1d83ce7b1b09b5c346ef7449e436d9cdffef03aba4ec38" exitCode=0 Dec 06 08:31:18 crc kubenswrapper[4763]: I1206 08:31:18.539640 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"816a5c93-44e9-4ab0-90f1-95fac48302bc","Type":"ContainerDied","Data":"dce86d9ace68c426a9b745092305506124817985da993583637780ee632f0ccf"} Dec 06 08:31:18 crc kubenswrapper[4763]: I1206 08:31:18.539667 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"816a5c93-44e9-4ab0-90f1-95fac48302bc","Type":"ContainerDied","Data":"0c3b58f8789d26003d1d83ce7b1b09b5c346ef7449e436d9cdffef03aba4ec38"} Dec 06 08:31:20 crc kubenswrapper[4763]: I1206 08:31:20.398791 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-77bbc88767-6qptg"] Dec 06 08:31:20 crc kubenswrapper[4763]: W1206 08:31:20.479790 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89364500_19a7_4b4f_aa5c_cf8730a63fdd.slice/crio-9276eba3640f49fbbc60a3d4ffc520c1bc9551124f8cae8fe901a046889b8302 WatchSource:0}: Error finding container 9276eba3640f49fbbc60a3d4ffc520c1bc9551124f8cae8fe901a046889b8302: Status 404 returned error can't find the container with id 9276eba3640f49fbbc60a3d4ffc520c1bc9551124f8cae8fe901a046889b8302 Dec 06 08:31:20 crc kubenswrapper[4763]: I1206 08:31:20.560498 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-77bbc88767-6qptg" event={"ID":"89364500-19a7-4b4f-aa5c-cf8730a63fdd","Type":"ContainerStarted","Data":"9276eba3640f49fbbc60a3d4ffc520c1bc9551124f8cae8fe901a046889b8302"} Dec 06 08:31:20 crc kubenswrapper[4763]: I1206 08:31:20.567239 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerStarted","Data":"dcc051956abdc610e17bef83d6aaf61ff0f8cab2599d0512d3bc72f70b0f9015"} Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.591755 4763 generic.go:334] "Generic (PLEG): container finished" podID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerID="27d42574155cba03a1dd97cd82d71c76c15545158f689875073b3402242214e3" exitCode=0 Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.592002 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"816a5c93-44e9-4ab0-90f1-95fac48302bc","Type":"ContainerDied","Data":"27d42574155cba03a1dd97cd82d71c76c15545158f689875073b3402242214e3"} Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.613950 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1b185411-8ba8-4524-8f1d-e7f69f87dc05","Type":"ContainerStarted","Data":"f0c274d9f97309e3049c29c952d8e470c9bc3502c8bba6a2018928ffc1ccb2db"} Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.624168 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-77bbc88767-6qptg" event={"ID":"89364500-19a7-4b4f-aa5c-cf8730a63fdd","Type":"ContainerStarted","Data":"92e882e6327de03fb3be4122ba0ad0b85f1a0d584fe1ec0aa71090744ca97f33"} Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.624220 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.624232 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:21 
crc kubenswrapper[4763]: I1206 08:31:21.624242 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-77bbc88767-6qptg" event={"ID":"89364500-19a7-4b4f-aa5c-cf8730a63fdd","Type":"ContainerStarted","Data":"b7476ac23608ac3d1839009ea4635176c44b7bb158c6af7dd18055737020d8ee"} Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.654695 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.045501469 podStartE2EDuration="12.654677453s" podCreationTimestamp="2025-12-06 08:31:09 +0000 UTC" firstStartedPulling="2025-12-06 08:31:09.918085564 +0000 UTC m=+1152.493790602" lastFinishedPulling="2025-12-06 08:31:20.527261548 +0000 UTC m=+1163.102966586" observedRunningTime="2025-12-06 08:31:21.64006135 +0000 UTC m=+1164.215766408" watchObservedRunningTime="2025-12-06 08:31:21.654677453 +0000 UTC m=+1164.230382491" Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.695450 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-rlbsb"] Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.697553 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-rlbsb" Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.699801 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-77bbc88767-6qptg" podStartSLOduration=6.699782609 podStartE2EDuration="6.699782609s" podCreationTimestamp="2025-12-06 08:31:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:31:21.689325297 +0000 UTC m=+1164.265030335" watchObservedRunningTime="2025-12-06 08:31:21.699782609 +0000 UTC m=+1164.275487647" Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.775511 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-rlbsb"] Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.794345 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv926\" (UniqueName: \"kubernetes.io/projected/e3a4e496-4b51-4e05-8b48-7edf7846d70c-kube-api-access-dv926\") pod \"nova-api-db-create-rlbsb\" (UID: \"e3a4e496-4b51-4e05-8b48-7edf7846d70c\") " pod="openstack/nova-api-db-create-rlbsb" Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.794461 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3a4e496-4b51-4e05-8b48-7edf7846d70c-operator-scripts\") pod \"nova-api-db-create-rlbsb\" (UID: \"e3a4e496-4b51-4e05-8b48-7edf7846d70c\") " pod="openstack/nova-api-db-create-rlbsb" Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.890733 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-w27ss"] Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.892646 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-w27ss" Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.896159 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv926\" (UniqueName: \"kubernetes.io/projected/e3a4e496-4b51-4e05-8b48-7edf7846d70c-kube-api-access-dv926\") pod \"nova-api-db-create-rlbsb\" (UID: \"e3a4e496-4b51-4e05-8b48-7edf7846d70c\") " pod="openstack/nova-api-db-create-rlbsb" Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.896262 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3a4e496-4b51-4e05-8b48-7edf7846d70c-operator-scripts\") pod \"nova-api-db-create-rlbsb\" (UID: \"e3a4e496-4b51-4e05-8b48-7edf7846d70c\") " pod="openstack/nova-api-db-create-rlbsb" Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.897248 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3a4e496-4b51-4e05-8b48-7edf7846d70c-operator-scripts\") pod \"nova-api-db-create-rlbsb\" (UID: \"e3a4e496-4b51-4e05-8b48-7edf7846d70c\") " pod="openstack/nova-api-db-create-rlbsb" Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.901316 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-w27ss"] Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.933490 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv926\" (UniqueName: \"kubernetes.io/projected/e3a4e496-4b51-4e05-8b48-7edf7846d70c-kube-api-access-dv926\") pod \"nova-api-db-create-rlbsb\" (UID: \"e3a4e496-4b51-4e05-8b48-7edf7846d70c\") " pod="openstack/nova-api-db-create-rlbsb" Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.992283 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-ksdhn"] Dec 06 08:31:21 crc kubenswrapper[4763]: I1206 08:31:21.993841 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-ksdhn" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.004102 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq7l8\" (UniqueName: \"kubernetes.io/projected/ade65003-bc0b-43b4-ba9d-76cd8729deb1-kube-api-access-nq7l8\") pod \"nova-cell0-db-create-w27ss\" (UID: \"ade65003-bc0b-43b4-ba9d-76cd8729deb1\") " pod="openstack/nova-cell0-db-create-w27ss" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.004150 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ade65003-bc0b-43b4-ba9d-76cd8729deb1-operator-scripts\") pod \"nova-cell0-db-create-w27ss\" (UID: \"ade65003-bc0b-43b4-ba9d-76cd8729deb1\") " pod="openstack/nova-cell0-db-create-w27ss" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.009144 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-ksdhn"] Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.030937 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.070746 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-rlbsb" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.105015 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-scripts\") pod \"816a5c93-44e9-4ab0-90f1-95fac48302bc\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.105117 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-combined-ca-bundle\") pod \"816a5c93-44e9-4ab0-90f1-95fac48302bc\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.105145 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-log-httpd\") pod \"816a5c93-44e9-4ab0-90f1-95fac48302bc\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.105209 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-run-httpd\") pod \"816a5c93-44e9-4ab0-90f1-95fac48302bc\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.105243 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-sg-core-conf-yaml\") pod \"816a5c93-44e9-4ab0-90f1-95fac48302bc\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.105267 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-config-data\") pod \"816a5c93-44e9-4ab0-90f1-95fac48302bc\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.105383 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwp6t\" (UniqueName: \"kubernetes.io/projected/816a5c93-44e9-4ab0-90f1-95fac48302bc-kube-api-access-lwp6t\") pod \"816a5c93-44e9-4ab0-90f1-95fac48302bc\" (UID: \"816a5c93-44e9-4ab0-90f1-95fac48302bc\") " Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.105849 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74scm\" (UniqueName: \"kubernetes.io/projected/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-kube-api-access-74scm\") pod \"nova-cell1-db-create-ksdhn\" (UID: \"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a\") " pod="openstack/nova-cell1-db-create-ksdhn" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.106318 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq7l8\" (UniqueName: \"kubernetes.io/projected/ade65003-bc0b-43b4-ba9d-76cd8729deb1-kube-api-access-nq7l8\") pod \"nova-cell0-db-create-w27ss\" (UID: \"ade65003-bc0b-43b4-ba9d-76cd8729deb1\") " pod="openstack/nova-cell0-db-create-w27ss" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.106360 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/ade65003-bc0b-43b4-ba9d-76cd8729deb1-operator-scripts\") pod \"nova-cell0-db-create-w27ss\" (UID: \"ade65003-bc0b-43b4-ba9d-76cd8729deb1\") " pod="openstack/nova-cell0-db-create-w27ss" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.106388 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-operator-scripts\") pod \"nova-cell1-db-create-ksdhn\" (UID: \"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a\") " pod="openstack/nova-cell1-db-create-ksdhn" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.106575 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "816a5c93-44e9-4ab0-90f1-95fac48302bc" (UID: "816a5c93-44e9-4ab0-90f1-95fac48302bc"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.110501 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "816a5c93-44e9-4ab0-90f1-95fac48302bc" (UID: "816a5c93-44e9-4ab0-90f1-95fac48302bc"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.113255 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ade65003-bc0b-43b4-ba9d-76cd8729deb1-operator-scripts\") pod \"nova-cell0-db-create-w27ss\" (UID: \"ade65003-bc0b-43b4-ba9d-76cd8729deb1\") " pod="openstack/nova-cell0-db-create-w27ss" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.113464 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-scripts" (OuterVolumeSpecName: "scripts") pod "816a5c93-44e9-4ab0-90f1-95fac48302bc" (UID: "816a5c93-44e9-4ab0-90f1-95fac48302bc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.114329 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/816a5c93-44e9-4ab0-90f1-95fac48302bc-kube-api-access-lwp6t" (OuterVolumeSpecName: "kube-api-access-lwp6t") pod "816a5c93-44e9-4ab0-90f1-95fac48302bc" (UID: "816a5c93-44e9-4ab0-90f1-95fac48302bc"). InnerVolumeSpecName "kube-api-access-lwp6t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.130509 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq7l8\" (UniqueName: \"kubernetes.io/projected/ade65003-bc0b-43b4-ba9d-76cd8729deb1-kube-api-access-nq7l8\") pod \"nova-cell0-db-create-w27ss\" (UID: \"ade65003-bc0b-43b4-ba9d-76cd8729deb1\") " pod="openstack/nova-cell0-db-create-w27ss" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.143941 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0494-account-create-update-bnvcj"] Dec 06 08:31:22 crc kubenswrapper[4763]: E1206 08:31:22.144587 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="ceilometer-notification-agent" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.144605 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="ceilometer-notification-agent" Dec 06 08:31:22 crc kubenswrapper[4763]: E1206 08:31:22.144628 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="sg-core" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.144634 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="sg-core" Dec 06 08:31:22 crc kubenswrapper[4763]: E1206 08:31:22.144643 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="ceilometer-central-agent" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.144650 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="ceilometer-central-agent" Dec 06 08:31:22 crc kubenswrapper[4763]: E1206 08:31:22.144665 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="proxy-httpd" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.144671 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="proxy-httpd" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.144831 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="proxy-httpd" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.144845 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="sg-core" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.144852 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="ceilometer-notification-agent" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.144859 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" containerName="ceilometer-central-agent" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.145476 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0494-account-create-update-bnvcj" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.149243 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.162289 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0494-account-create-update-bnvcj"] Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.190188 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "816a5c93-44e9-4ab0-90f1-95fac48302bc" (UID: "816a5c93-44e9-4ab0-90f1-95fac48302bc"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.211775 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74scm\" (UniqueName: \"kubernetes.io/projected/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-kube-api-access-74scm\") pod \"nova-cell1-db-create-ksdhn\" (UID: \"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a\") " pod="openstack/nova-cell1-db-create-ksdhn" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.211872 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjkg2\" (UniqueName: \"kubernetes.io/projected/69dcf3fd-83ee-487e-9664-bf72b745d236-kube-api-access-vjkg2\") pod \"nova-api-0494-account-create-update-bnvcj\" (UID: \"69dcf3fd-83ee-487e-9664-bf72b745d236\") " pod="openstack/nova-api-0494-account-create-update-bnvcj" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.212079 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-operator-scripts\") pod \"nova-cell1-db-create-ksdhn\" (UID: \"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a\") " pod="openstack/nova-cell1-db-create-ksdhn" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.212180 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69dcf3fd-83ee-487e-9664-bf72b745d236-operator-scripts\") pod \"nova-api-0494-account-create-update-bnvcj\" (UID: \"69dcf3fd-83ee-487e-9664-bf72b745d236\") " pod="openstack/nova-api-0494-account-create-update-bnvcj" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.212405 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwp6t\" (UniqueName: \"kubernetes.io/projected/816a5c93-44e9-4ab0-90f1-95fac48302bc-kube-api-access-lwp6t\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.212421 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.212434 4763 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.212445 4763 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/816a5c93-44e9-4ab0-90f1-95fac48302bc-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.212455 4763 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.213179 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-operator-scripts\") pod \"nova-cell1-db-create-ksdhn\" (UID: \"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a\") " pod="openstack/nova-cell1-db-create-ksdhn" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.251176 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74scm\" (UniqueName: \"kubernetes.io/projected/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-kube-api-access-74scm\") pod \"nova-cell1-db-create-ksdhn\" (UID: \"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a\") " pod="openstack/nova-cell1-db-create-ksdhn" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.258128 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "816a5c93-44e9-4ab0-90f1-95fac48302bc" (UID: "816a5c93-44e9-4ab0-90f1-95fac48302bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.313211 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjkg2\" (UniqueName: \"kubernetes.io/projected/69dcf3fd-83ee-487e-9664-bf72b745d236-kube-api-access-vjkg2\") pod \"nova-api-0494-account-create-update-bnvcj\" (UID: \"69dcf3fd-83ee-487e-9664-bf72b745d236\") " pod="openstack/nova-api-0494-account-create-update-bnvcj" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.313320 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69dcf3fd-83ee-487e-9664-bf72b745d236-operator-scripts\") pod \"nova-api-0494-account-create-update-bnvcj\" (UID: \"69dcf3fd-83ee-487e-9664-bf72b745d236\") " pod="openstack/nova-api-0494-account-create-update-bnvcj" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.313424 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.314098 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69dcf3fd-83ee-487e-9664-bf72b745d236-operator-scripts\") pod \"nova-api-0494-account-create-update-bnvcj\" (UID: \"69dcf3fd-83ee-487e-9664-bf72b745d236\") " pod="openstack/nova-api-0494-account-create-update-bnvcj" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.341163 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-w27ss" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.364148 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-ksdhn" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.390412 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-config-data" (OuterVolumeSpecName: "config-data") pod "816a5c93-44e9-4ab0-90f1-95fac48302bc" (UID: "816a5c93-44e9-4ab0-90f1-95fac48302bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.393773 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjkg2\" (UniqueName: \"kubernetes.io/projected/69dcf3fd-83ee-487e-9664-bf72b745d236-kube-api-access-vjkg2\") pod \"nova-api-0494-account-create-update-bnvcj\" (UID: \"69dcf3fd-83ee-487e-9664-bf72b745d236\") " pod="openstack/nova-api-0494-account-create-update-bnvcj" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.408402 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-6f65-account-create-update-h52kq"] Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.410682 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-6f65-account-create-update-h52kq" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.415278 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/816a5c93-44e9-4ab0-90f1-95fac48302bc-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.447693 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.536368 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0494-account-create-update-bnvcj" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.539828 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-6f65-account-create-update-h52kq"] Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.542763 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cee19965-9240-4933-8864-fd187283c3ba-operator-scripts\") pod \"nova-cell0-6f65-account-create-update-h52kq\" (UID: \"cee19965-9240-4933-8864-fd187283c3ba\") " pod="openstack/nova-cell0-6f65-account-create-update-h52kq" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.542843 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-295d5\" (UniqueName: \"kubernetes.io/projected/cee19965-9240-4933-8864-fd187283c3ba-kube-api-access-295d5\") pod \"nova-cell0-6f65-account-create-update-h52kq\" (UID: \"cee19965-9240-4933-8864-fd187283c3ba\") " pod="openstack/nova-cell0-6f65-account-create-update-h52kq" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.647006 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cee19965-9240-4933-8864-fd187283c3ba-operator-scripts\") pod \"nova-cell0-6f65-account-create-update-h52kq\" (UID: \"cee19965-9240-4933-8864-fd187283c3ba\") " pod="openstack/nova-cell0-6f65-account-create-update-h52kq" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.647088 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-295d5\" (UniqueName: \"kubernetes.io/projected/cee19965-9240-4933-8864-fd187283c3ba-kube-api-access-295d5\") pod \"nova-cell0-6f65-account-create-update-h52kq\" (UID: \"cee19965-9240-4933-8864-fd187283c3ba\") " pod="openstack/nova-cell0-6f65-account-create-update-h52kq" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.648488 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cee19965-9240-4933-8864-fd187283c3ba-operator-scripts\") pod \"nova-cell0-6f65-account-create-update-h52kq\" (UID: \"cee19965-9240-4933-8864-fd187283c3ba\") " pod="openstack/nova-cell0-6f65-account-create-update-h52kq" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.664482 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-e43f-account-create-update-jj5wk"] Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.665879 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.678419 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-e43f-account-create-update-jj5wk"] Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.679974 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.694432 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-295d5\" (UniqueName: \"kubernetes.io/projected/cee19965-9240-4933-8864-fd187283c3ba-kube-api-access-295d5\") pod \"nova-cell0-6f65-account-create-update-h52kq\" (UID: \"cee19965-9240-4933-8864-fd187283c3ba\") " pod="openstack/nova-cell0-6f65-account-create-update-h52kq" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.710653 4763 generic.go:334] "Generic (PLEG): container finished" podID="432569c2-b7db-4f70-80ba-80817d206847" containerID="faa41bc1a2797743bc528d0d97be37323bf39516cf02c75e83c9caa570dfa7a9" exitCode=137 Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.710719 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d9d9cc79d-g6nvn" event={"ID":"432569c2-b7db-4f70-80ba-80817d206847","Type":"ContainerDied","Data":"faa41bc1a2797743bc528d0d97be37323bf39516cf02c75e83c9caa570dfa7a9"} Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.750988 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"816a5c93-44e9-4ab0-90f1-95fac48302bc","Type":"ContainerDied","Data":"e71c988b4945ef3f0d5bd168e5ccf6db8c946a50c24e192cda73c3861e52e36a"} Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.751297 4763 scope.go:117] "RemoveContainer" containerID="dce86d9ace68c426a9b745092305506124817985da993583637780ee632f0ccf" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.751101 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.790092 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-6f65-account-create-update-h52kq" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.793956 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-rlbsb"] Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.832382 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.861126 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41c2ea22-f27e-4a11-bda3-17b509191246-operator-scripts\") pod \"nova-cell1-e43f-account-create-update-jj5wk\" (UID: \"41c2ea22-f27e-4a11-bda3-17b509191246\") " pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.865939 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbnsv\" (UniqueName: \"kubernetes.io/projected/41c2ea22-f27e-4a11-bda3-17b509191246-kube-api-access-fbnsv\") pod \"nova-cell1-e43f-account-create-update-jj5wk\" (UID: \"41c2ea22-f27e-4a11-bda3-17b509191246\") " pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.881214 4763 scope.go:117] "RemoveContainer" containerID="29915af9ea203e520ed16bce754a9b8ad29c8c523feb9c5bb26dd45aa9051a1e" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.885044 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.910734 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.914527 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.917718 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.918558 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.950320 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.971036 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbnsv\" (UniqueName: \"kubernetes.io/projected/41c2ea22-f27e-4a11-bda3-17b509191246-kube-api-access-fbnsv\") pod \"nova-cell1-e43f-account-create-update-jj5wk\" (UID: \"41c2ea22-f27e-4a11-bda3-17b509191246\") " pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.982414 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41c2ea22-f27e-4a11-bda3-17b509191246-operator-scripts\") pod \"nova-cell1-e43f-account-create-update-jj5wk\" (UID: \"41c2ea22-f27e-4a11-bda3-17b509191246\") " pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" Dec 06 08:31:22 crc kubenswrapper[4763]: I1206 08:31:22.984623 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41c2ea22-f27e-4a11-bda3-17b509191246-operator-scripts\") pod \"nova-cell1-e43f-account-create-update-jj5wk\" (UID: \"41c2ea22-f27e-4a11-bda3-17b509191246\") " pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.012012 4763 scope.go:117] "RemoveContainer" containerID="27d42574155cba03a1dd97cd82d71c76c15545158f689875073b3402242214e3" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.018821 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbnsv\" (UniqueName: \"kubernetes.io/projected/41c2ea22-f27e-4a11-bda3-17b509191246-kube-api-access-fbnsv\") pod \"nova-cell1-e43f-account-create-update-jj5wk\" (UID: \"41c2ea22-f27e-4a11-bda3-17b509191246\") " pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.088781 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6lqb\" (UniqueName: \"kubernetes.io/projected/798e0450-759b-41f7-afff-657c49ffb31f-kube-api-access-f6lqb\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.088906 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-run-httpd\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.088944 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 
08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.088980 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.088994 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-log-httpd\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.089511 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-scripts\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.089572 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-config-data\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.193027 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6lqb\" (UniqueName: \"kubernetes.io/projected/798e0450-759b-41f7-afff-657c49ffb31f-kube-api-access-f6lqb\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.193156 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-run-httpd\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.193287 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.193321 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.193336 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-log-httpd\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.193582 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-scripts\") pod \"ceilometer-0\" (UID: 
\"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.193807 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-config-data\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.194978 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-log-httpd\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.200059 4763 scope.go:117] "RemoveContainer" containerID="0c3b58f8789d26003d1d83ce7b1b09b5c346ef7449e436d9cdffef03aba4ec38" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.202139 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.204076 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.204603 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-run-httpd\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.205878 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-config-data\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.228958 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-scripts\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.249665 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6lqb\" (UniqueName: \"kubernetes.io/projected/798e0450-759b-41f7-afff-657c49ffb31f-kube-api-access-f6lqb\") pod \"ceilometer-0\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.253342 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.304864 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-tls-certs\") pod \"432569c2-b7db-4f70-80ba-80817d206847\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.304949 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-secret-key\") pod \"432569c2-b7db-4f70-80ba-80817d206847\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.305000 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-scripts\") pod \"432569c2-b7db-4f70-80ba-80817d206847\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.305126 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/432569c2-b7db-4f70-80ba-80817d206847-logs\") pod \"432569c2-b7db-4f70-80ba-80817d206847\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.305326 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wknps\" (UniqueName: \"kubernetes.io/projected/432569c2-b7db-4f70-80ba-80817d206847-kube-api-access-wknps\") pod \"432569c2-b7db-4f70-80ba-80817d206847\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.305471 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-config-data\") pod \"432569c2-b7db-4f70-80ba-80817d206847\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.305742 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-combined-ca-bundle\") pod \"432569c2-b7db-4f70-80ba-80817d206847\" (UID: \"432569c2-b7db-4f70-80ba-80817d206847\") " Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.312955 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.315280 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/432569c2-b7db-4f70-80ba-80817d206847-logs" (OuterVolumeSpecName: "logs") pod "432569c2-b7db-4f70-80ba-80817d206847" (UID: "432569c2-b7db-4f70-80ba-80817d206847"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.315715 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/432569c2-b7db-4f70-80ba-80817d206847-kube-api-access-wknps" (OuterVolumeSpecName: "kube-api-access-wknps") pod "432569c2-b7db-4f70-80ba-80817d206847" (UID: "432569c2-b7db-4f70-80ba-80817d206847"). InnerVolumeSpecName "kube-api-access-wknps". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.392640 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "432569c2-b7db-4f70-80ba-80817d206847" (UID: "432569c2-b7db-4f70-80ba-80817d206847"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.416415 4763 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.416451 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/432569c2-b7db-4f70-80ba-80817d206847-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.416465 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wknps\" (UniqueName: \"kubernetes.io/projected/432569c2-b7db-4f70-80ba-80817d206847-kube-api-access-wknps\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.455672 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "432569c2-b7db-4f70-80ba-80817d206847" (UID: "432569c2-b7db-4f70-80ba-80817d206847"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.466724 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-scripts" (OuterVolumeSpecName: "scripts") pod "432569c2-b7db-4f70-80ba-80817d206847" (UID: "432569c2-b7db-4f70-80ba-80817d206847"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.487794 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-config-data" (OuterVolumeSpecName: "config-data") pod "432569c2-b7db-4f70-80ba-80817d206847" (UID: "432569c2-b7db-4f70-80ba-80817d206847"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.503352 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.519971 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-ksdhn"] Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.521161 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.521180 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.521196 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/432569c2-b7db-4f70-80ba-80817d206847-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.529626 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-w27ss"] Dec 06 08:31:23 crc kubenswrapper[4763]: W1206 08:31:23.530530 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podade65003_bc0b_43b4_ba9d_76cd8729deb1.slice/crio-e1edacaaa78ac87010873c2051dd3a952da8219cfb1d215631bc7747949d0563 WatchSource:0}: Error finding container e1edacaaa78ac87010873c2051dd3a952da8219cfb1d215631bc7747949d0563: Status 404 returned error can't find the container with id e1edacaaa78ac87010873c2051dd3a952da8219cfb1d215631bc7747949d0563 Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.538843 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0494-account-create-update-bnvcj"] Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.542425 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "432569c2-b7db-4f70-80ba-80817d206847" (UID: "432569c2-b7db-4f70-80ba-80817d206847"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.625627 4763 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/432569c2-b7db-4f70-80ba-80817d206847-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.738656 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="816a5c93-44e9-4ab0-90f1-95fac48302bc" path="/var/lib/kubelet/pods/816a5c93-44e9-4ab0-90f1-95fac48302bc/volumes" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.826425 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0494-account-create-update-bnvcj" event={"ID":"69dcf3fd-83ee-487e-9664-bf72b745d236","Type":"ContainerStarted","Data":"be739de3da56475e72220217219d024dbf853f97f8e68009016733145ee7a254"} Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.829940 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-w27ss" event={"ID":"ade65003-bc0b-43b4-ba9d-76cd8729deb1","Type":"ContainerStarted","Data":"e1edacaaa78ac87010873c2051dd3a952da8219cfb1d215631bc7747949d0563"} Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.848039 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-ksdhn" event={"ID":"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a","Type":"ContainerStarted","Data":"42f0e2935248da795276a8df0910c5e206be8366c79b4972a0e4f3f9cd228641"} Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.852494 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d9d9cc79d-g6nvn" event={"ID":"432569c2-b7db-4f70-80ba-80817d206847","Type":"ContainerDied","Data":"236303f2ca93ae7eaf03dcbbd8096f81c63a53c8c9b24b2b7bf625cd7ba17d8d"} Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.852538 4763 scope.go:117] "RemoveContainer" containerID="172ad6cb4a74ed844abba93af0f99c6c1d603ab40f0ad451c1ac05210cb0c2b8" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.852630 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6d9d9cc79d-g6nvn" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.859935 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-rlbsb" event={"ID":"e3a4e496-4b51-4e05-8b48-7edf7846d70c","Type":"ContainerStarted","Data":"0cded627a2cadd4726b8895afa9ccecabb715e3182001096b4bbc2ec60ff1395"} Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.859978 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-rlbsb" event={"ID":"e3a4e496-4b51-4e05-8b48-7edf7846d70c","Type":"ContainerStarted","Data":"41e7a05d201b59061d9fdab61e77578f494b4d51a1d91c2d4dbe19d13e3d4084"} Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.860137 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-6f65-account-create-update-h52kq"] Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.917228 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-rlbsb" podStartSLOduration=2.91720149 podStartE2EDuration="2.91720149s" podCreationTimestamp="2025-12-06 08:31:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:31:23.887205293 +0000 UTC m=+1166.462910331" watchObservedRunningTime="2025-12-06 08:31:23.91720149 +0000 UTC m=+1166.492906528" Dec 06 08:31:23 crc kubenswrapper[4763]: I1206 08:31:23.966324 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-e43f-account-create-update-jj5wk"] Dec 06 08:31:23 crc kubenswrapper[4763]: W1206 08:31:23.974798 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41c2ea22_f27e_4a11_bda3_17b509191246.slice/crio-68e8acf81ee50cae694c1445a49da9e6e04c9635257f38873cf9e1c17370ee85 WatchSource:0}: Error finding container 68e8acf81ee50cae694c1445a49da9e6e04c9635257f38873cf9e1c17370ee85: Status 404 returned error can't find the container with id 68e8acf81ee50cae694c1445a49da9e6e04c9635257f38873cf9e1c17370ee85 Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.122648 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.175402 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.242038 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d9d9cc79d-g6nvn"] Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.275933 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6d9d9cc79d-g6nvn"] Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.446716 4763 scope.go:117] "RemoveContainer" containerID="faa41bc1a2797743bc528d0d97be37323bf39516cf02c75e83c9caa570dfa7a9" Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.877446 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"798e0450-759b-41f7-afff-657c49ffb31f","Type":"ContainerStarted","Data":"b63b05e53e76fd09216e521ce5aeb504f32233892ba7b011deaa7ea212aad2a1"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.877768 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"798e0450-759b-41f7-afff-657c49ffb31f","Type":"ContainerStarted","Data":"a04274dd4705134bc51bee4cccbda0fe9b634fa206c25ec2c905cba4767a6bdf"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.881773 4763 generic.go:334] "Generic (PLEG): container finished" podID="7731d4cb-7569-4783-842d-acef9e33cb50" containerID="dcc051956abdc610e17bef83d6aaf61ff0f8cab2599d0512d3bc72f70b0f9015" exitCode=1 Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.881877 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerDied","Data":"dcc051956abdc610e17bef83d6aaf61ff0f8cab2599d0512d3bc72f70b0f9015"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.881933 4763 scope.go:117] "RemoveContainer" containerID="e45544d0106c1c0fa6adcc91cf5d8dad3edef8158b0994d69d0be1849fa3d2a0" Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.882850 4763 scope.go:117] "RemoveContainer" containerID="dcc051956abdc610e17bef83d6aaf61ff0f8cab2599d0512d3bc72f70b0f9015" Dec 06 08:31:24 crc kubenswrapper[4763]: E1206 08:31:24.883129 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(7731d4cb-7569-4783-842d-acef9e33cb50)\"" pod="openstack/watcher-decision-engine-0" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.887539 4763 generic.go:334] "Generic (PLEG): container finished" podID="e3a4e496-4b51-4e05-8b48-7edf7846d70c" containerID="0cded627a2cadd4726b8895afa9ccecabb715e3182001096b4bbc2ec60ff1395" exitCode=0 Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.887683 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-rlbsb" event={"ID":"e3a4e496-4b51-4e05-8b48-7edf7846d70c","Type":"ContainerDied","Data":"0cded627a2cadd4726b8895afa9ccecabb715e3182001096b4bbc2ec60ff1395"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.893565 4763 generic.go:334] "Generic (PLEG): container finished" podID="69dcf3fd-83ee-487e-9664-bf72b745d236" containerID="76699d38119a38d502310a830558b278889dd2c7a0842979ab647d696b4a5df0" exitCode=0 Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.893696 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0494-account-create-update-bnvcj" event={"ID":"69dcf3fd-83ee-487e-9664-bf72b745d236","Type":"ContainerDied","Data":"76699d38119a38d502310a830558b278889dd2c7a0842979ab647d696b4a5df0"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.905402 4763 generic.go:334] "Generic (PLEG): container finished" podID="ade65003-bc0b-43b4-ba9d-76cd8729deb1" containerID="7093f4301b57be16bbd48c53cbc462645f2b401817814f3461964e8700d78312" exitCode=0 Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.905513 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-w27ss" event={"ID":"ade65003-bc0b-43b4-ba9d-76cd8729deb1","Type":"ContainerDied","Data":"7093f4301b57be16bbd48c53cbc462645f2b401817814f3461964e8700d78312"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.912355 4763 generic.go:334] "Generic (PLEG): container finished" podID="cee19965-9240-4933-8864-fd187283c3ba" containerID="47ce7ecd429f3aeeff3f204650f5ab79ed14aceaae2fcff4146042d595715556" exitCode=0 Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.912446 4763 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-6f65-account-create-update-h52kq" event={"ID":"cee19965-9240-4933-8864-fd187283c3ba","Type":"ContainerDied","Data":"47ce7ecd429f3aeeff3f204650f5ab79ed14aceaae2fcff4146042d595715556"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.912480 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-6f65-account-create-update-h52kq" event={"ID":"cee19965-9240-4933-8864-fd187283c3ba","Type":"ContainerStarted","Data":"5b05bd20fb277902f6aeccf6d36e7861898db56060a73c67bf70f9855fcf29df"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.914428 4763 generic.go:334] "Generic (PLEG): container finished" podID="41c2ea22-f27e-4a11-bda3-17b509191246" containerID="0c090efeca0e20d3d72c8e409b6b2151d7b912f8f620783f2a5c21f5943147b5" exitCode=0 Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.914498 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" event={"ID":"41c2ea22-f27e-4a11-bda3-17b509191246","Type":"ContainerDied","Data":"0c090efeca0e20d3d72c8e409b6b2151d7b912f8f620783f2a5c21f5943147b5"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.914520 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" event={"ID":"41c2ea22-f27e-4a11-bda3-17b509191246","Type":"ContainerStarted","Data":"68e8acf81ee50cae694c1445a49da9e6e04c9635257f38873cf9e1c17370ee85"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.941673 4763 generic.go:334] "Generic (PLEG): container finished" podID="0d7564a7-563b-416e-b223-fe69473a041d" containerID="69c8117d016f3c5b76ec6f7f715d7dbef50443bd150be54a5b1d87e801eaf5ec" exitCode=137 Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.941741 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d7564a7-563b-416e-b223-fe69473a041d","Type":"ContainerDied","Data":"69c8117d016f3c5b76ec6f7f715d7dbef50443bd150be54a5b1d87e801eaf5ec"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.941763 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d7564a7-563b-416e-b223-fe69473a041d","Type":"ContainerDied","Data":"8f04c8437d193d6291b14550f3aa2c470d260c2ce6bb4858d10a0d0eb19fbdf3"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.941773 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f04c8437d193d6291b14550f3aa2c470d260c2ce6bb4858d10a0d0eb19fbdf3" Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.951495 4763 generic.go:334] "Generic (PLEG): container finished" podID="c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a" containerID="f6c57d6cd958a1fb65b31e203be2c6dcf02f8bf778b3af47df43a1720837dab4" exitCode=0 Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.951546 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-ksdhn" event={"ID":"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a","Type":"ContainerDied","Data":"f6c57d6cd958a1fb65b31e203be2c6dcf02f8bf778b3af47df43a1720837dab4"} Dec 06 08:31:24 crc kubenswrapper[4763]: I1206 08:31:24.978178 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.056709 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data-custom\") pod \"0d7564a7-563b-416e-b223-fe69473a041d\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.056981 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-combined-ca-bundle\") pod \"0d7564a7-563b-416e-b223-fe69473a041d\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.057044 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data\") pod \"0d7564a7-563b-416e-b223-fe69473a041d\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.057064 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d7564a7-563b-416e-b223-fe69473a041d-etc-machine-id\") pod \"0d7564a7-563b-416e-b223-fe69473a041d\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.057159 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d7564a7-563b-416e-b223-fe69473a041d-logs\") pod \"0d7564a7-563b-416e-b223-fe69473a041d\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.057338 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tg7gw\" (UniqueName: \"kubernetes.io/projected/0d7564a7-563b-416e-b223-fe69473a041d-kube-api-access-tg7gw\") pod \"0d7564a7-563b-416e-b223-fe69473a041d\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.057376 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-scripts\") pod \"0d7564a7-563b-416e-b223-fe69473a041d\" (UID: \"0d7564a7-563b-416e-b223-fe69473a041d\") " Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.062257 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d7564a7-563b-416e-b223-fe69473a041d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0d7564a7-563b-416e-b223-fe69473a041d" (UID: "0d7564a7-563b-416e-b223-fe69473a041d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.066675 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d7564a7-563b-416e-b223-fe69473a041d-logs" (OuterVolumeSpecName: "logs") pod "0d7564a7-563b-416e-b223-fe69473a041d" (UID: "0d7564a7-563b-416e-b223-fe69473a041d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.071676 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d7564a7-563b-416e-b223-fe69473a041d-kube-api-access-tg7gw" (OuterVolumeSpecName: "kube-api-access-tg7gw") pod "0d7564a7-563b-416e-b223-fe69473a041d" (UID: "0d7564a7-563b-416e-b223-fe69473a041d"). InnerVolumeSpecName "kube-api-access-tg7gw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.071804 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-scripts" (OuterVolumeSpecName: "scripts") pod "0d7564a7-563b-416e-b223-fe69473a041d" (UID: "0d7564a7-563b-416e-b223-fe69473a041d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.072129 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0d7564a7-563b-416e-b223-fe69473a041d" (UID: "0d7564a7-563b-416e-b223-fe69473a041d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.127440 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d7564a7-563b-416e-b223-fe69473a041d" (UID: "0d7564a7-563b-416e-b223-fe69473a041d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.149211 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data" (OuterVolumeSpecName: "config-data") pod "0d7564a7-563b-416e-b223-fe69473a041d" (UID: "0d7564a7-563b-416e-b223-fe69473a041d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.160427 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d7564a7-563b-416e-b223-fe69473a041d-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.160623 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tg7gw\" (UniqueName: \"kubernetes.io/projected/0d7564a7-563b-416e-b223-fe69473a041d-kube-api-access-tg7gw\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.160692 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.160791 4763 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.160853 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.160926 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d7564a7-563b-416e-b223-fe69473a041d-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.160984 4763 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d7564a7-563b-416e-b223-fe69473a041d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.278218 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.732282 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="432569c2-b7db-4f70-80ba-80817d206847" path="/var/lib/kubelet/pods/432569c2-b7db-4f70-80ba-80817d206847/volumes" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.835253 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.835705 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-77bbc88767-6qptg" Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.965369 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"798e0450-759b-41f7-afff-657c49ffb31f","Type":"ContainerStarted","Data":"e8be3188365641da2843768c6fe573e4983b5bab0a8c048ab96b76602db0663f"} Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.965795 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"798e0450-759b-41f7-afff-657c49ffb31f","Type":"ContainerStarted","Data":"26e96cb8cc88e635a6331a6b765e7934523e1bf54f2b908c813e6b5201bec243"} Dec 06 08:31:25 crc kubenswrapper[4763]: I1206 08:31:25.973989 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.014648 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.047663 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.076147 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 06 08:31:26 crc kubenswrapper[4763]: E1206 08:31:26.076569 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d7564a7-563b-416e-b223-fe69473a041d" containerName="cinder-api-log" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.076581 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d7564a7-563b-416e-b223-fe69473a041d" containerName="cinder-api-log" Dec 06 08:31:26 crc kubenswrapper[4763]: E1206 08:31:26.076615 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d7564a7-563b-416e-b223-fe69473a041d" containerName="cinder-api" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.076620 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d7564a7-563b-416e-b223-fe69473a041d" containerName="cinder-api" Dec 06 08:31:26 crc kubenswrapper[4763]: E1206 08:31:26.076629 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.076636 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon" Dec 06 08:31:26 crc kubenswrapper[4763]: E1206 08:31:26.076655 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon-log" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.076660 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon-log" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.076831 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d7564a7-563b-416e-b223-fe69473a041d" containerName="cinder-api-log" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.076844 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d7564a7-563b-416e-b223-fe69473a041d" containerName="cinder-api" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.076857 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon-log" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.076875 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="432569c2-b7db-4f70-80ba-80817d206847" containerName="horizon" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.077887 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.087500 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.087708 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.088466 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.089070 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.211916 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.211993 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-logs\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.212013 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-public-tls-certs\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.212047 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-etc-machine-id\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.212062 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-scripts\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.212089 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-config-data\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.212111 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-config-data-custom\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.212160 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phcdq\" (UniqueName: 
\"kubernetes.io/projected/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-kube-api-access-phcdq\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.212238 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.317506 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-public-tls-certs\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.317552 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-logs\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.317595 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-etc-machine-id\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.317615 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-scripts\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.317651 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-config-data\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.317678 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-config-data-custom\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.317716 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-etc-machine-id\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.317736 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phcdq\" (UniqueName: \"kubernetes.io/projected/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-kube-api-access-phcdq\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.317933 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.317977 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.319100 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-logs\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.325756 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.326251 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-config-data-custom\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.326888 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.328670 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-public-tls-certs\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.335101 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-config-data\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.335944 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-scripts\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.337114 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phcdq\" (UniqueName: \"kubernetes.io/projected/acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa-kube-api-access-phcdq\") pod \"cinder-api-0\" (UID: \"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa\") " pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.451378 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.530186 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.530227 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.530889 4763 scope.go:117] "RemoveContainer" containerID="dcc051956abdc610e17bef83d6aaf61ff0f8cab2599d0512d3bc72f70b0f9015" Dec 06 08:31:26 crc kubenswrapper[4763]: E1206 08:31:26.531470 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(7731d4cb-7569-4783-842d-acef9e33cb50)\"" pod="openstack/watcher-decision-engine-0" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.617803 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-6f65-account-create-update-h52kq" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.655449 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.738049 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cee19965-9240-4933-8864-fd187283c3ba-operator-scripts\") pod \"cee19965-9240-4933-8864-fd187283c3ba\" (UID: \"cee19965-9240-4933-8864-fd187283c3ba\") " Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.738381 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbnsv\" (UniqueName: \"kubernetes.io/projected/41c2ea22-f27e-4a11-bda3-17b509191246-kube-api-access-fbnsv\") pod \"41c2ea22-f27e-4a11-bda3-17b509191246\" (UID: \"41c2ea22-f27e-4a11-bda3-17b509191246\") " Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.738507 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cee19965-9240-4933-8864-fd187283c3ba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cee19965-9240-4933-8864-fd187283c3ba" (UID: "cee19965-9240-4933-8864-fd187283c3ba"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.738519 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-295d5\" (UniqueName: \"kubernetes.io/projected/cee19965-9240-4933-8864-fd187283c3ba-kube-api-access-295d5\") pod \"cee19965-9240-4933-8864-fd187283c3ba\" (UID: \"cee19965-9240-4933-8864-fd187283c3ba\") " Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.738655 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41c2ea22-f27e-4a11-bda3-17b509191246-operator-scripts\") pod \"41c2ea22-f27e-4a11-bda3-17b509191246\" (UID: \"41c2ea22-f27e-4a11-bda3-17b509191246\") " Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.739373 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cee19965-9240-4933-8864-fd187283c3ba-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.739749 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41c2ea22-f27e-4a11-bda3-17b509191246-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "41c2ea22-f27e-4a11-bda3-17b509191246" (UID: "41c2ea22-f27e-4a11-bda3-17b509191246"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.756520 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41c2ea22-f27e-4a11-bda3-17b509191246-kube-api-access-fbnsv" (OuterVolumeSpecName: "kube-api-access-fbnsv") pod "41c2ea22-f27e-4a11-bda3-17b509191246" (UID: "41c2ea22-f27e-4a11-bda3-17b509191246"). InnerVolumeSpecName "kube-api-access-fbnsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.764189 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cee19965-9240-4933-8864-fd187283c3ba-kube-api-access-295d5" (OuterVolumeSpecName: "kube-api-access-295d5") pod "cee19965-9240-4933-8864-fd187283c3ba" (UID: "cee19965-9240-4933-8864-fd187283c3ba"). InnerVolumeSpecName "kube-api-access-295d5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.841019 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbnsv\" (UniqueName: \"kubernetes.io/projected/41c2ea22-f27e-4a11-bda3-17b509191246-kube-api-access-fbnsv\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.841540 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-295d5\" (UniqueName: \"kubernetes.io/projected/cee19965-9240-4933-8864-fd187283c3ba-kube-api-access-295d5\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.841559 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41c2ea22-f27e-4a11-bda3-17b509191246-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.992184 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-6f65-account-create-update-h52kq" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.994222 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-6f65-account-create-update-h52kq" event={"ID":"cee19965-9240-4933-8864-fd187283c3ba","Type":"ContainerDied","Data":"5b05bd20fb277902f6aeccf6d36e7861898db56060a73c67bf70f9855fcf29df"} Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.994276 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b05bd20fb277902f6aeccf6d36e7861898db56060a73c67bf70f9855fcf29df" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.999767 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.999781 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-e43f-account-create-update-jj5wk" event={"ID":"41c2ea22-f27e-4a11-bda3-17b509191246","Type":"ContainerDied","Data":"68e8acf81ee50cae694c1445a49da9e6e04c9635257f38873cf9e1c17370ee85"} Dec 06 08:31:26 crc kubenswrapper[4763]: I1206 08:31:26.999815 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68e8acf81ee50cae694c1445a49da9e6e04c9635257f38873cf9e1c17370ee85" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.002641 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-ksdhn" event={"ID":"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a","Type":"ContainerDied","Data":"42f0e2935248da795276a8df0910c5e206be8366c79b4972a0e4f3f9cd228641"} Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.002849 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42f0e2935248da795276a8df0910c5e206be8366c79b4972a0e4f3f9cd228641" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.007658 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-rlbsb" event={"ID":"e3a4e496-4b51-4e05-8b48-7edf7846d70c","Type":"ContainerDied","Data":"41e7a05d201b59061d9fdab61e77578f494b4d51a1d91c2d4dbe19d13e3d4084"} Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.007784 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41e7a05d201b59061d9fdab61e77578f494b4d51a1d91c2d4dbe19d13e3d4084" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.010132 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0494-account-create-update-bnvcj" event={"ID":"69dcf3fd-83ee-487e-9664-bf72b745d236","Type":"ContainerDied","Data":"be739de3da56475e72220217219d024dbf853f97f8e68009016733145ee7a254"} Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.010254 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be739de3da56475e72220217219d024dbf853f97f8e68009016733145ee7a254" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.012098 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-w27ss" event={"ID":"ade65003-bc0b-43b4-ba9d-76cd8729deb1","Type":"ContainerDied","Data":"e1edacaaa78ac87010873c2051dd3a952da8219cfb1d215631bc7747949d0563"} Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.012197 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1edacaaa78ac87010873c2051dd3a952da8219cfb1d215631bc7747949d0563" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 
08:31:27.020872 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-w27ss" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.040137 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0494-account-create-update-bnvcj" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.049777 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-rlbsb" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.063852 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-ksdhn" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.138176 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 06 08:31:27 crc kubenswrapper[4763]: W1206 08:31:27.145939 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacc25d06_1cc0_4a09_b0d2_5bb9e423f7fa.slice/crio-2533d37dcf22b54f850183785fd9775b52c3f4aae6dda1221589498115a9843e WatchSource:0}: Error finding container 2533d37dcf22b54f850183785fd9775b52c3f4aae6dda1221589498115a9843e: Status 404 returned error can't find the container with id 2533d37dcf22b54f850183785fd9775b52c3f4aae6dda1221589498115a9843e Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.146091 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dv926\" (UniqueName: \"kubernetes.io/projected/e3a4e496-4b51-4e05-8b48-7edf7846d70c-kube-api-access-dv926\") pod \"e3a4e496-4b51-4e05-8b48-7edf7846d70c\" (UID: \"e3a4e496-4b51-4e05-8b48-7edf7846d70c\") " Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.146170 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-operator-scripts\") pod \"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a\" (UID: \"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a\") " Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.146209 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ade65003-bc0b-43b4-ba9d-76cd8729deb1-operator-scripts\") pod \"ade65003-bc0b-43b4-ba9d-76cd8729deb1\" (UID: \"ade65003-bc0b-43b4-ba9d-76cd8729deb1\") " Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.150072 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nq7l8\" (UniqueName: \"kubernetes.io/projected/ade65003-bc0b-43b4-ba9d-76cd8729deb1-kube-api-access-nq7l8\") pod \"ade65003-bc0b-43b4-ba9d-76cd8729deb1\" (UID: \"ade65003-bc0b-43b4-ba9d-76cd8729deb1\") " Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.150143 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjkg2\" (UniqueName: \"kubernetes.io/projected/69dcf3fd-83ee-487e-9664-bf72b745d236-kube-api-access-vjkg2\") pod \"69dcf3fd-83ee-487e-9664-bf72b745d236\" (UID: \"69dcf3fd-83ee-487e-9664-bf72b745d236\") " Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.150251 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69dcf3fd-83ee-487e-9664-bf72b745d236-operator-scripts\") pod \"69dcf3fd-83ee-487e-9664-bf72b745d236\" (UID: 
\"69dcf3fd-83ee-487e-9664-bf72b745d236\") " Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.150310 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3a4e496-4b51-4e05-8b48-7edf7846d70c-operator-scripts\") pod \"e3a4e496-4b51-4e05-8b48-7edf7846d70c\" (UID: \"e3a4e496-4b51-4e05-8b48-7edf7846d70c\") " Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.150365 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74scm\" (UniqueName: \"kubernetes.io/projected/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-kube-api-access-74scm\") pod \"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a\" (UID: \"c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a\") " Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.152174 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3a4e496-4b51-4e05-8b48-7edf7846d70c-kube-api-access-dv926" (OuterVolumeSpecName: "kube-api-access-dv926") pod "e3a4e496-4b51-4e05-8b48-7edf7846d70c" (UID: "e3a4e496-4b51-4e05-8b48-7edf7846d70c"). InnerVolumeSpecName "kube-api-access-dv926". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.152346 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ade65003-bc0b-43b4-ba9d-76cd8729deb1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ade65003-bc0b-43b4-ba9d-76cd8729deb1" (UID: "ade65003-bc0b-43b4-ba9d-76cd8729deb1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.152360 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a" (UID: "c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.152691 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3a4e496-4b51-4e05-8b48-7edf7846d70c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e3a4e496-4b51-4e05-8b48-7edf7846d70c" (UID: "e3a4e496-4b51-4e05-8b48-7edf7846d70c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.152755 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69dcf3fd-83ee-487e-9664-bf72b745d236-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "69dcf3fd-83ee-487e-9664-bf72b745d236" (UID: "69dcf3fd-83ee-487e-9664-bf72b745d236"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.154542 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-kube-api-access-74scm" (OuterVolumeSpecName: "kube-api-access-74scm") pod "c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a" (UID: "c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a"). InnerVolumeSpecName "kube-api-access-74scm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.156165 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ade65003-bc0b-43b4-ba9d-76cd8729deb1-kube-api-access-nq7l8" (OuterVolumeSpecName: "kube-api-access-nq7l8") pod "ade65003-bc0b-43b4-ba9d-76cd8729deb1" (UID: "ade65003-bc0b-43b4-ba9d-76cd8729deb1"). InnerVolumeSpecName "kube-api-access-nq7l8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.157343 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69dcf3fd-83ee-487e-9664-bf72b745d236-kube-api-access-vjkg2" (OuterVolumeSpecName: "kube-api-access-vjkg2") pod "69dcf3fd-83ee-487e-9664-bf72b745d236" (UID: "69dcf3fd-83ee-487e-9664-bf72b745d236"). InnerVolumeSpecName "kube-api-access-vjkg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.253053 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjkg2\" (UniqueName: \"kubernetes.io/projected/69dcf3fd-83ee-487e-9664-bf72b745d236-kube-api-access-vjkg2\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.253112 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69dcf3fd-83ee-487e-9664-bf72b745d236-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.253126 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3a4e496-4b51-4e05-8b48-7edf7846d70c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.253138 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74scm\" (UniqueName: \"kubernetes.io/projected/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-kube-api-access-74scm\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.253149 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dv926\" (UniqueName: \"kubernetes.io/projected/e3a4e496-4b51-4e05-8b48-7edf7846d70c-kube-api-access-dv926\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.253160 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.253192 4763 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ade65003-bc0b-43b4-ba9d-76cd8729deb1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.253205 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nq7l8\" (UniqueName: \"kubernetes.io/projected/ade65003-bc0b-43b4-ba9d-76cd8729deb1-kube-api-access-nq7l8\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:27 crc kubenswrapper[4763]: I1206 08:31:27.736297 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d7564a7-563b-416e-b223-fe69473a041d" path="/var/lib/kubelet/pods/0d7564a7-563b-416e-b223-fe69473a041d/volumes" Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.020780 4763 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/cinder-api-0" event={"ID":"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa","Type":"ContainerStarted","Data":"2533d37dcf22b54f850183785fd9775b52c3f4aae6dda1221589498115a9843e"} Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.024205 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-w27ss" Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.024255 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"798e0450-759b-41f7-afff-657c49ffb31f","Type":"ContainerStarted","Data":"782bd8c381d490e6907cd9f990b26f995d9e7731ac007d1c6d4d1571f686a93d"} Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.024310 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-ksdhn" Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.024301 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="ceilometer-central-agent" containerID="cri-o://b63b05e53e76fd09216e521ce5aeb504f32233892ba7b011deaa7ea212aad2a1" gracePeriod=30 Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.024409 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.025202 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-rlbsb" Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.025312 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0494-account-create-update-bnvcj" Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.025717 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="proxy-httpd" containerID="cri-o://782bd8c381d490e6907cd9f990b26f995d9e7731ac007d1c6d4d1571f686a93d" gracePeriod=30 Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.025866 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="ceilometer-notification-agent" containerID="cri-o://26e96cb8cc88e635a6331a6b765e7934523e1bf54f2b908c813e6b5201bec243" gracePeriod=30 Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.025954 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="sg-core" containerID="cri-o://e8be3188365641da2843768c6fe573e4983b5bab0a8c048ab96b76602db0663f" gracePeriod=30 Dec 06 08:31:28 crc kubenswrapper[4763]: I1206 08:31:28.074813 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.024693679 podStartE2EDuration="6.074795746s" podCreationTimestamp="2025-12-06 08:31:22 +0000 UTC" firstStartedPulling="2025-12-06 08:31:24.175111149 +0000 UTC m=+1166.750816187" lastFinishedPulling="2025-12-06 08:31:27.225213226 +0000 UTC m=+1169.800918254" observedRunningTime="2025-12-06 08:31:28.045963679 +0000 UTC m=+1170.621668717" watchObservedRunningTime="2025-12-06 08:31:28.074795746 +0000 UTC m=+1170.650500784" Dec 06 08:31:29 crc kubenswrapper[4763]: I1206 08:31:29.033630 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-api-0" event={"ID":"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa","Type":"ContainerStarted","Data":"18ff193d3cabb7462971521df23eb8f1f30bbd7ddc1db48136f12a99001c12fa"} Dec 06 08:31:29 crc kubenswrapper[4763]: I1206 08:31:29.037017 4763 generic.go:334] "Generic (PLEG): container finished" podID="798e0450-759b-41f7-afff-657c49ffb31f" containerID="782bd8c381d490e6907cd9f990b26f995d9e7731ac007d1c6d4d1571f686a93d" exitCode=0 Dec 06 08:31:29 crc kubenswrapper[4763]: I1206 08:31:29.037049 4763 generic.go:334] "Generic (PLEG): container finished" podID="798e0450-759b-41f7-afff-657c49ffb31f" containerID="e8be3188365641da2843768c6fe573e4983b5bab0a8c048ab96b76602db0663f" exitCode=2 Dec 06 08:31:29 crc kubenswrapper[4763]: I1206 08:31:29.037058 4763 generic.go:334] "Generic (PLEG): container finished" podID="798e0450-759b-41f7-afff-657c49ffb31f" containerID="26e96cb8cc88e635a6331a6b765e7934523e1bf54f2b908c813e6b5201bec243" exitCode=0 Dec 06 08:31:29 crc kubenswrapper[4763]: I1206 08:31:29.037079 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"798e0450-759b-41f7-afff-657c49ffb31f","Type":"ContainerDied","Data":"782bd8c381d490e6907cd9f990b26f995d9e7731ac007d1c6d4d1571f686a93d"} Dec 06 08:31:29 crc kubenswrapper[4763]: I1206 08:31:29.037114 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"798e0450-759b-41f7-afff-657c49ffb31f","Type":"ContainerDied","Data":"e8be3188365641da2843768c6fe573e4983b5bab0a8c048ab96b76602db0663f"} Dec 06 08:31:29 crc kubenswrapper[4763]: I1206 08:31:29.037128 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"798e0450-759b-41f7-afff-657c49ffb31f","Type":"ContainerDied","Data":"26e96cb8cc88e635a6331a6b765e7934523e1bf54f2b908c813e6b5201bec243"} Dec 06 08:31:31 crc kubenswrapper[4763]: I1206 08:31:31.056731 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa","Type":"ContainerStarted","Data":"cc164f54f60825c7799b55e506338af0ab23000f380190911835edaab458d15c"} Dec 06 08:31:31 crc kubenswrapper[4763]: I1206 08:31:31.056907 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 06 08:31:31 crc kubenswrapper[4763]: I1206 08:31:31.091797 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.091772629 podStartE2EDuration="5.091772629s" podCreationTimestamp="2025-12-06 08:31:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:31:31.080739873 +0000 UTC m=+1173.656444931" watchObservedRunningTime="2025-12-06 08:31:31.091772629 +0000 UTC m=+1173.667477667" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.555138 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-7nmxm"] Dec 06 08:31:32 crc kubenswrapper[4763]: E1206 08:31:32.555812 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cee19965-9240-4933-8864-fd187283c3ba" containerName="mariadb-account-create-update" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.555827 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="cee19965-9240-4933-8864-fd187283c3ba" containerName="mariadb-account-create-update" Dec 06 08:31:32 crc kubenswrapper[4763]: E1206 08:31:32.555866 4763 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="ade65003-bc0b-43b4-ba9d-76cd8729deb1" containerName="mariadb-database-create" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.555876 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ade65003-bc0b-43b4-ba9d-76cd8729deb1" containerName="mariadb-database-create" Dec 06 08:31:32 crc kubenswrapper[4763]: E1206 08:31:32.555964 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3a4e496-4b51-4e05-8b48-7edf7846d70c" containerName="mariadb-database-create" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.555975 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3a4e496-4b51-4e05-8b48-7edf7846d70c" containerName="mariadb-database-create" Dec 06 08:31:32 crc kubenswrapper[4763]: E1206 08:31:32.555990 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a" containerName="mariadb-database-create" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.555997 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a" containerName="mariadb-database-create" Dec 06 08:31:32 crc kubenswrapper[4763]: E1206 08:31:32.556016 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69dcf3fd-83ee-487e-9664-bf72b745d236" containerName="mariadb-account-create-update" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.556023 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="69dcf3fd-83ee-487e-9664-bf72b745d236" containerName="mariadb-account-create-update" Dec 06 08:31:32 crc kubenswrapper[4763]: E1206 08:31:32.556034 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c2ea22-f27e-4a11-bda3-17b509191246" containerName="mariadb-account-create-update" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.556041 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c2ea22-f27e-4a11-bda3-17b509191246" containerName="mariadb-account-create-update" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.556268 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="41c2ea22-f27e-4a11-bda3-17b509191246" containerName="mariadb-account-create-update" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.556282 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3a4e496-4b51-4e05-8b48-7edf7846d70c" containerName="mariadb-database-create" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.556292 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="69dcf3fd-83ee-487e-9664-bf72b745d236" containerName="mariadb-account-create-update" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.556310 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a" containerName="mariadb-database-create" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.556328 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="ade65003-bc0b-43b4-ba9d-76cd8729deb1" containerName="mariadb-database-create" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.556343 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="cee19965-9240-4933-8864-fd187283c3ba" containerName="mariadb-account-create-update" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.558255 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.561476 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.561806 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-g6xwf" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.561807 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.575473 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-7nmxm"] Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.674367 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-scripts\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.674720 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.674803 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-config-data\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.674877 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wsrw\" (UniqueName: \"kubernetes.io/projected/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-kube-api-access-9wsrw\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.776504 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wsrw\" (UniqueName: \"kubernetes.io/projected/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-kube-api-access-9wsrw\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.776573 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-scripts\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.776640 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: 
\"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.776743 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-config-data\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.785398 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-scripts\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.788369 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.796730 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-config-data\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.820488 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wsrw\" (UniqueName: \"kubernetes.io/projected/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-kube-api-access-9wsrw\") pod \"nova-cell0-conductor-db-sync-7nmxm\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:32 crc kubenswrapper[4763]: I1206 08:31:32.892993 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:33 crc kubenswrapper[4763]: W1206 08:31:33.372944 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1d63988_0eda_49ae_b4ec_0cf81b1f9784.slice/crio-0e0cc10663e419d711d9ce65fb53e29ee81631eec020ae44cb3f8c04ae334240 WatchSource:0}: Error finding container 0e0cc10663e419d711d9ce65fb53e29ee81631eec020ae44cb3f8c04ae334240: Status 404 returned error can't find the container with id 0e0cc10663e419d711d9ce65fb53e29ee81631eec020ae44cb3f8c04ae334240 Dec 06 08:31:33 crc kubenswrapper[4763]: I1206 08:31:33.376510 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-7nmxm"] Dec 06 08:31:34 crc kubenswrapper[4763]: I1206 08:31:34.107463 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-7nmxm" event={"ID":"a1d63988-0eda-49ae-b4ec-0cf81b1f9784","Type":"ContainerStarted","Data":"0e0cc10663e419d711d9ce65fb53e29ee81631eec020ae44cb3f8c04ae334240"} Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.154082 4763 generic.go:334] "Generic (PLEG): container finished" podID="798e0450-759b-41f7-afff-657c49ffb31f" containerID="b63b05e53e76fd09216e521ce5aeb504f32233892ba7b011deaa7ea212aad2a1" exitCode=0 Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.154229 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"798e0450-759b-41f7-afff-657c49ffb31f","Type":"ContainerDied","Data":"b63b05e53e76fd09216e521ce5aeb504f32233892ba7b011deaa7ea212aad2a1"} Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.311005 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.432558 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-config-data\") pod \"798e0450-759b-41f7-afff-657c49ffb31f\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.432753 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-run-httpd\") pod \"798e0450-759b-41f7-afff-657c49ffb31f\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.432830 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6lqb\" (UniqueName: \"kubernetes.io/projected/798e0450-759b-41f7-afff-657c49ffb31f-kube-api-access-f6lqb\") pod \"798e0450-759b-41f7-afff-657c49ffb31f\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.432913 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-combined-ca-bundle\") pod \"798e0450-759b-41f7-afff-657c49ffb31f\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.432957 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-log-httpd\") pod \"798e0450-759b-41f7-afff-657c49ffb31f\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.432998 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-sg-core-conf-yaml\") pod \"798e0450-759b-41f7-afff-657c49ffb31f\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.433084 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-scripts\") pod \"798e0450-759b-41f7-afff-657c49ffb31f\" (UID: \"798e0450-759b-41f7-afff-657c49ffb31f\") " Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.433428 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "798e0450-759b-41f7-afff-657c49ffb31f" (UID: "798e0450-759b-41f7-afff-657c49ffb31f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.433575 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "798e0450-759b-41f7-afff-657c49ffb31f" (UID: "798e0450-759b-41f7-afff-657c49ffb31f"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.433923 4763 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.433948 4763 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/798e0450-759b-41f7-afff-657c49ffb31f-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.439532 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-scripts" (OuterVolumeSpecName: "scripts") pod "798e0450-759b-41f7-afff-657c49ffb31f" (UID: "798e0450-759b-41f7-afff-657c49ffb31f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.449219 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/798e0450-759b-41f7-afff-657c49ffb31f-kube-api-access-f6lqb" (OuterVolumeSpecName: "kube-api-access-f6lqb") pod "798e0450-759b-41f7-afff-657c49ffb31f" (UID: "798e0450-759b-41f7-afff-657c49ffb31f"). InnerVolumeSpecName "kube-api-access-f6lqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.496245 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "798e0450-759b-41f7-afff-657c49ffb31f" (UID: "798e0450-759b-41f7-afff-657c49ffb31f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.536148 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.536184 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6lqb\" (UniqueName: \"kubernetes.io/projected/798e0450-759b-41f7-afff-657c49ffb31f-kube-api-access-f6lqb\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.536195 4763 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.556947 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "798e0450-759b-41f7-afff-657c49ffb31f" (UID: "798e0450-759b-41f7-afff-657c49ffb31f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.567493 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-config-data" (OuterVolumeSpecName: "config-data") pod "798e0450-759b-41f7-afff-657c49ffb31f" (UID: "798e0450-759b-41f7-afff-657c49ffb31f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.638085 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:35 crc kubenswrapper[4763]: I1206 08:31:35.638131 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/798e0450-759b-41f7-afff-657c49ffb31f-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.183803 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"798e0450-759b-41f7-afff-657c49ffb31f","Type":"ContainerDied","Data":"a04274dd4705134bc51bee4cccbda0fe9b634fa206c25ec2c905cba4767a6bdf"} Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.184157 4763 scope.go:117] "RemoveContainer" containerID="782bd8c381d490e6907cd9f990b26f995d9e7731ac007d1c6d4d1571f686a93d" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.184316 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.226975 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.239642 4763 scope.go:117] "RemoveContainer" containerID="e8be3188365641da2843768c6fe573e4983b5bab0a8c048ab96b76602db0663f" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.244673 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.255949 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:36 crc kubenswrapper[4763]: E1206 08:31:36.256393 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="ceilometer-central-agent" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.256414 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="ceilometer-central-agent" Dec 06 08:31:36 crc kubenswrapper[4763]: E1206 08:31:36.256436 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="ceilometer-notification-agent" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.256443 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="ceilometer-notification-agent" Dec 06 08:31:36 crc kubenswrapper[4763]: E1206 08:31:36.256473 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="sg-core" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.256479 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="sg-core" Dec 06 08:31:36 crc kubenswrapper[4763]: E1206 08:31:36.256490 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="proxy-httpd" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.256496 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="proxy-httpd" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.256690 4763 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="ceilometer-central-agent" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.256719 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="proxy-httpd" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.256735 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="ceilometer-notification-agent" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.256751 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="798e0450-759b-41f7-afff-657c49ffb31f" containerName="sg-core" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.258858 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.261404 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.261613 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.268374 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.295474 4763 scope.go:117] "RemoveContainer" containerID="26e96cb8cc88e635a6331a6b765e7934523e1bf54f2b908c813e6b5201bec243" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.317267 4763 scope.go:117] "RemoveContainer" containerID="b63b05e53e76fd09216e521ce5aeb504f32233892ba7b011deaa7ea212aad2a1" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.381257 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-config-data\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.381357 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-log-httpd\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.381716 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kn6s\" (UniqueName: \"kubernetes.io/projected/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-kube-api-access-8kn6s\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.381864 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-run-httpd\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.381915 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-scripts\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " 
pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.381999 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.382102 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.484188 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-config-data\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.484264 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-log-httpd\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.484335 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kn6s\" (UniqueName: \"kubernetes.io/projected/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-kube-api-access-8kn6s\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.485265 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-run-httpd\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.485302 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-scripts\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.485328 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.485365 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.485548 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-log-httpd\") pod \"ceilometer-0\" (UID: 
\"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.485811 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-run-httpd\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.491597 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.492422 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-config-data\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.493159 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.504843 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-scripts\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.506373 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kn6s\" (UniqueName: \"kubernetes.io/projected/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-kube-api-access-8kn6s\") pod \"ceilometer-0\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " pod="openstack/ceilometer-0" Dec 06 08:31:36 crc kubenswrapper[4763]: I1206 08:31:36.587123 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:37 crc kubenswrapper[4763]: I1206 08:31:37.728096 4763 scope.go:117] "RemoveContainer" containerID="dcc051956abdc610e17bef83d6aaf61ff0f8cab2599d0512d3bc72f70b0f9015" Dec 06 08:31:37 crc kubenswrapper[4763]: E1206 08:31:37.728670 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(7731d4cb-7569-4783-842d-acef9e33cb50)\"" pod="openstack/watcher-decision-engine-0" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" Dec 06 08:31:37 crc kubenswrapper[4763]: I1206 08:31:37.736617 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="798e0450-759b-41f7-afff-657c49ffb31f" path="/var/lib/kubelet/pods/798e0450-759b-41f7-afff-657c49ffb31f/volumes" Dec 06 08:31:38 crc kubenswrapper[4763]: I1206 08:31:38.565270 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 06 08:31:42 crc kubenswrapper[4763]: I1206 08:31:42.245885 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-7nmxm" event={"ID":"a1d63988-0eda-49ae-b4ec-0cf81b1f9784","Type":"ContainerStarted","Data":"29ffe96153cb0d73b77807cd6b82344fc55acf089dc138c21c0ec7c68c43178f"} Dec 06 08:31:42 crc kubenswrapper[4763]: I1206 08:31:42.268038 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-7nmxm" podStartSLOduration=1.696863797 podStartE2EDuration="10.268012742s" podCreationTimestamp="2025-12-06 08:31:32 +0000 UTC" firstStartedPulling="2025-12-06 08:31:33.375477148 +0000 UTC m=+1175.951182186" lastFinishedPulling="2025-12-06 08:31:41.946626093 +0000 UTC m=+1184.522331131" observedRunningTime="2025-12-06 08:31:42.261537377 +0000 UTC m=+1184.837242415" watchObservedRunningTime="2025-12-06 08:31:42.268012742 +0000 UTC m=+1184.843717780" Dec 06 08:31:42 crc kubenswrapper[4763]: I1206 08:31:42.357305 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:42 crc kubenswrapper[4763]: W1206 08:31:42.361548 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod703c7bfb_9dd8_4acd_9a9f_a8efb81dbabb.slice/crio-83a35fcf68d2fb1aac1cfa3355836f7d80adf44c755b89c2e50b221c30dfd4ed WatchSource:0}: Error finding container 83a35fcf68d2fb1aac1cfa3355836f7d80adf44c755b89c2e50b221c30dfd4ed: Status 404 returned error can't find the container with id 83a35fcf68d2fb1aac1cfa3355836f7d80adf44c755b89c2e50b221c30dfd4ed Dec 06 08:31:42 crc kubenswrapper[4763]: I1206 08:31:42.537437 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:31:42 crc kubenswrapper[4763]: I1206 08:31:42.537751 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:31:42 crc kubenswrapper[4763]: I1206 08:31:42.537799 4763 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:31:42 crc kubenswrapper[4763]: I1206 08:31:42.538422 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c874920460a590f92765b487dcad196b365be507c27bb07cd2fdb6e943ba11c4"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 08:31:42 crc kubenswrapper[4763]: I1206 08:31:42.538474 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://c874920460a590f92765b487dcad196b365be507c27bb07cd2fdb6e943ba11c4" gracePeriod=600 Dec 06 08:31:43 crc kubenswrapper[4763]: I1206 08:31:43.269011 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb","Type":"ContainerStarted","Data":"bfe5dbae931004786a2a8148052c457a45ec6acc21921d30966615d35de3a65b"} Dec 06 08:31:43 crc kubenswrapper[4763]: I1206 08:31:43.269498 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb","Type":"ContainerStarted","Data":"c6ee4a5644273508d844ddb9513f23b938dc9de3adccd204f19a2deeade5ec7e"} Dec 06 08:31:43 crc kubenswrapper[4763]: I1206 08:31:43.269512 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb","Type":"ContainerStarted","Data":"83a35fcf68d2fb1aac1cfa3355836f7d80adf44c755b89c2e50b221c30dfd4ed"} Dec 06 08:31:43 crc kubenswrapper[4763]: I1206 08:31:43.272838 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="c874920460a590f92765b487dcad196b365be507c27bb07cd2fdb6e943ba11c4" exitCode=0 Dec 06 08:31:43 crc kubenswrapper[4763]: I1206 08:31:43.272925 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"c874920460a590f92765b487dcad196b365be507c27bb07cd2fdb6e943ba11c4"} Dec 06 08:31:43 crc kubenswrapper[4763]: I1206 08:31:43.272995 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"12049d13410239289c3450e1ce76dfa60781d6b25fb180e7241cdfee5b8c3dbd"} Dec 06 08:31:43 crc kubenswrapper[4763]: I1206 08:31:43.273016 4763 scope.go:117] "RemoveContainer" containerID="7d94fae00dffdf507a33769e3c6f2fe9c1acc5dc734ee30a31695ce80e2528cc" Dec 06 08:31:44 crc kubenswrapper[4763]: I1206 08:31:44.287591 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb","Type":"ContainerStarted","Data":"79ba783d9368d66361908c0ff5a6fd9aeaf1d4865b368239aecfd36fdc7d13ce"} Dec 06 08:31:45 crc kubenswrapper[4763]: I1206 08:31:45.302755 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb","Type":"ContainerStarted","Data":"3b5d636734408a792ee7c67ff4b9636f2127f9e650577e576161174b7cff6c31"} Dec 06 08:31:45 crc kubenswrapper[4763]: I1206 08:31:45.303346 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 06 08:31:45 crc kubenswrapper[4763]: I1206 08:31:45.327264 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=6.852237691 podStartE2EDuration="9.327242214s" podCreationTimestamp="2025-12-06 08:31:36 +0000 UTC" firstStartedPulling="2025-12-06 08:31:42.36410338 +0000 UTC m=+1184.939808418" lastFinishedPulling="2025-12-06 08:31:44.839107903 +0000 UTC m=+1187.414812941" observedRunningTime="2025-12-06 08:31:45.324568902 +0000 UTC m=+1187.900273970" watchObservedRunningTime="2025-12-06 08:31:45.327242214 +0000 UTC m=+1187.902947252" Dec 06 08:31:46 crc kubenswrapper[4763]: I1206 08:31:46.486104 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:31:46 crc kubenswrapper[4763]: I1206 08:31:46.487409 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4c67602c-9d2e-44a3-a187-f51e90798ba3" containerName="glance-log" containerID="cri-o://8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9" gracePeriod=30 Dec 06 08:31:46 crc kubenswrapper[4763]: I1206 08:31:46.487480 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4c67602c-9d2e-44a3-a187-f51e90798ba3" containerName="glance-httpd" containerID="cri-o://2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1" gracePeriod=30 Dec 06 08:31:46 crc kubenswrapper[4763]: I1206 08:31:46.528269 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 06 08:31:46 crc kubenswrapper[4763]: I1206 08:31:46.528330 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:31:46 crc kubenswrapper[4763]: I1206 08:31:46.529213 4763 scope.go:117] "RemoveContainer" containerID="dcc051956abdc610e17bef83d6aaf61ff0f8cab2599d0512d3bc72f70b0f9015" Dec 06 08:31:46 crc kubenswrapper[4763]: E1206 08:31:46.529473 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(7731d4cb-7569-4783-842d-acef9e33cb50)\"" pod="openstack/watcher-decision-engine-0" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" Dec 06 08:31:47 crc kubenswrapper[4763]: I1206 08:31:47.322843 4763 generic.go:334] "Generic (PLEG): container finished" podID="4c67602c-9d2e-44a3-a187-f51e90798ba3" containerID="8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9" exitCode=143 Dec 06 08:31:47 crc kubenswrapper[4763]: I1206 08:31:47.322965 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c67602c-9d2e-44a3-a187-f51e90798ba3","Type":"ContainerDied","Data":"8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9"} Dec 06 08:31:47 crc kubenswrapper[4763]: I1206 08:31:47.584714 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:31:47 crc 
kubenswrapper[4763]: I1206 08:31:47.586264 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8b17ba6f-6373-4c93-b07d-73a464deec1a" containerName="glance-log" containerID="cri-o://23b9588da28ae21a6c7103f6638ff5ab0bc536c0da94d1411d3a43609732d58f" gracePeriod=30 Dec 06 08:31:47 crc kubenswrapper[4763]: I1206 08:31:47.586354 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8b17ba6f-6373-4c93-b07d-73a464deec1a" containerName="glance-httpd" containerID="cri-o://2c87cac4c45b34d6ade9c2630d14bf3a69128aee8227bc164c65239c1de87e46" gracePeriod=30 Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.216742 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.350440 4763 generic.go:334] "Generic (PLEG): container finished" podID="8b17ba6f-6373-4c93-b07d-73a464deec1a" containerID="23b9588da28ae21a6c7103f6638ff5ab0bc536c0da94d1411d3a43609732d58f" exitCode=143 Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.350503 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8b17ba6f-6373-4c93-b07d-73a464deec1a","Type":"ContainerDied","Data":"23b9588da28ae21a6c7103f6638ff5ab0bc536c0da94d1411d3a43609732d58f"} Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.357159 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"4c67602c-9d2e-44a3-a187-f51e90798ba3\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.357224 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-scripts\") pod \"4c67602c-9d2e-44a3-a187-f51e90798ba3\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.357291 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-public-tls-certs\") pod \"4c67602c-9d2e-44a3-a187-f51e90798ba3\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.357345 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-config-data\") pod \"4c67602c-9d2e-44a3-a187-f51e90798ba3\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.357398 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvbtw\" (UniqueName: \"kubernetes.io/projected/4c67602c-9d2e-44a3-a187-f51e90798ba3-kube-api-access-mvbtw\") pod \"4c67602c-9d2e-44a3-a187-f51e90798ba3\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.357482 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-httpd-run\") pod \"4c67602c-9d2e-44a3-a187-f51e90798ba3\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " Dec 06 08:31:48 crc 
kubenswrapper[4763]: I1206 08:31:48.357510 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-combined-ca-bundle\") pod \"4c67602c-9d2e-44a3-a187-f51e90798ba3\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.357601 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-logs\") pod \"4c67602c-9d2e-44a3-a187-f51e90798ba3\" (UID: \"4c67602c-9d2e-44a3-a187-f51e90798ba3\") " Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.358313 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4c67602c-9d2e-44a3-a187-f51e90798ba3" (UID: "4c67602c-9d2e-44a3-a187-f51e90798ba3"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.358525 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-logs" (OuterVolumeSpecName: "logs") pod "4c67602c-9d2e-44a3-a187-f51e90798ba3" (UID: "4c67602c-9d2e-44a3-a187-f51e90798ba3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.361105 4763 generic.go:334] "Generic (PLEG): container finished" podID="4c67602c-9d2e-44a3-a187-f51e90798ba3" containerID="2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1" exitCode=0 Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.361147 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c67602c-9d2e-44a3-a187-f51e90798ba3","Type":"ContainerDied","Data":"2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1"} Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.361177 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c67602c-9d2e-44a3-a187-f51e90798ba3","Type":"ContainerDied","Data":"eb06050263e65f121a7716bb6410dca9fd5fcd1fe4cf61de28583647e9ca596a"} Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.361194 4763 scope.go:117] "RemoveContainer" containerID="2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.361217 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.370141 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-scripts" (OuterVolumeSpecName: "scripts") pod "4c67602c-9d2e-44a3-a187-f51e90798ba3" (UID: "4c67602c-9d2e-44a3-a187-f51e90798ba3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.377604 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "4c67602c-9d2e-44a3-a187-f51e90798ba3" (UID: "4c67602c-9d2e-44a3-a187-f51e90798ba3"). InnerVolumeSpecName "local-storage11-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.377919 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c67602c-9d2e-44a3-a187-f51e90798ba3-kube-api-access-mvbtw" (OuterVolumeSpecName: "kube-api-access-mvbtw") pod "4c67602c-9d2e-44a3-a187-f51e90798ba3" (UID: "4c67602c-9d2e-44a3-a187-f51e90798ba3"). InnerVolumeSpecName "kube-api-access-mvbtw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.413649 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c67602c-9d2e-44a3-a187-f51e90798ba3" (UID: "4c67602c-9d2e-44a3-a187-f51e90798ba3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.454246 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-config-data" (OuterVolumeSpecName: "config-data") pod "4c67602c-9d2e-44a3-a187-f51e90798ba3" (UID: "4c67602c-9d2e-44a3-a187-f51e90798ba3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.461701 4763 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.461746 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.461760 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.461772 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvbtw\" (UniqueName: \"kubernetes.io/projected/4c67602c-9d2e-44a3-a187-f51e90798ba3-kube-api-access-mvbtw\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.461784 4763 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.461795 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.461804 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c67602c-9d2e-44a3-a187-f51e90798ba3-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.464455 4763 scope.go:117] "RemoveContainer" containerID="8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.466106 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4c67602c-9d2e-44a3-a187-f51e90798ba3" (UID: "4c67602c-9d2e-44a3-a187-f51e90798ba3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.491848 4763 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.493233 4763 scope.go:117] "RemoveContainer" containerID="2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1" Dec 06 08:31:48 crc kubenswrapper[4763]: E1206 08:31:48.496302 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1\": container with ID starting with 2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1 not found: ID does not exist" containerID="2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.496346 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1"} err="failed to get container status \"2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1\": rpc error: code = NotFound desc = could not find container \"2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1\": container with ID starting with 2704ec4dfe2d1a172fd2353dd8cc103a983a5ba2527a863744e8a5f378b9a5b1 not found: ID does not exist" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.496370 4763 scope.go:117] "RemoveContainer" containerID="8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9" Dec 06 08:31:48 crc kubenswrapper[4763]: E1206 08:31:48.500096 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9\": container with ID starting with 8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9 not found: ID does not exist" containerID="8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.500155 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9"} err="failed to get container status \"8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9\": rpc error: code = NotFound desc = could not find container \"8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9\": container with ID starting with 8e696a158e59192566e58e44211f019986b5600079974ab3a6f8d977fb366ea9 not found: ID does not exist" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.564162 4763 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.564191 4763 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c67602c-9d2e-44a3-a187-f51e90798ba3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:48 crc 
kubenswrapper[4763]: I1206 08:31:48.712248 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.730483 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.740756 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:31:48 crc kubenswrapper[4763]: E1206 08:31:48.741212 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c67602c-9d2e-44a3-a187-f51e90798ba3" containerName="glance-httpd" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.741232 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c67602c-9d2e-44a3-a187-f51e90798ba3" containerName="glance-httpd" Dec 06 08:31:48 crc kubenswrapper[4763]: E1206 08:31:48.741261 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c67602c-9d2e-44a3-a187-f51e90798ba3" containerName="glance-log" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.741267 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c67602c-9d2e-44a3-a187-f51e90798ba3" containerName="glance-log" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.741440 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c67602c-9d2e-44a3-a187-f51e90798ba3" containerName="glance-log" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.741458 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c67602c-9d2e-44a3-a187-f51e90798ba3" containerName="glance-httpd" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.742570 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.746210 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.746380 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.753340 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.871738 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltm8d\" (UniqueName: \"kubernetes.io/projected/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-kube-api-access-ltm8d\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.871811 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.871844 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " 
pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.871875 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.872199 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-config-data\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.872312 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.872375 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-scripts\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.872504 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-logs\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.975167 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-config-data\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.975579 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.975624 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-scripts\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.975698 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-logs\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " 
pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.975932 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltm8d\" (UniqueName: \"kubernetes.io/projected/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-kube-api-access-ltm8d\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.976207 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-logs\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.976858 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.977276 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.978333 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.978384 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.978962 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.980028 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.983438 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-scripts\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.988613 4763 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.993824 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-config-data\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:48 crc kubenswrapper[4763]: I1206 08:31:48.996752 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltm8d\" (UniqueName: \"kubernetes.io/projected/9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9-kube-api-access-ltm8d\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:49 crc kubenswrapper[4763]: I1206 08:31:49.051878 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9\") " pod="openstack/glance-default-external-api-0" Dec 06 08:31:49 crc kubenswrapper[4763]: I1206 08:31:49.097836 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 06 08:31:49 crc kubenswrapper[4763]: I1206 08:31:49.375666 4763 generic.go:334] "Generic (PLEG): container finished" podID="8b17ba6f-6373-4c93-b07d-73a464deec1a" containerID="2c87cac4c45b34d6ade9c2630d14bf3a69128aee8227bc164c65239c1de87e46" exitCode=0 Dec 06 08:31:49 crc kubenswrapper[4763]: I1206 08:31:49.376039 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8b17ba6f-6373-4c93-b07d-73a464deec1a","Type":"ContainerDied","Data":"2c87cac4c45b34d6ade9c2630d14bf3a69128aee8227bc164c65239c1de87e46"} Dec 06 08:31:49 crc kubenswrapper[4763]: I1206 08:31:49.529042 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:49 crc kubenswrapper[4763]: I1206 08:31:49.529385 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="ceilometer-central-agent" containerID="cri-o://c6ee4a5644273508d844ddb9513f23b938dc9de3adccd204f19a2deeade5ec7e" gracePeriod=30 Dec 06 08:31:49 crc kubenswrapper[4763]: I1206 08:31:49.529457 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="sg-core" containerID="cri-o://79ba783d9368d66361908c0ff5a6fd9aeaf1d4865b368239aecfd36fdc7d13ce" gracePeriod=30 Dec 06 08:31:49 crc kubenswrapper[4763]: I1206 08:31:49.529523 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="ceilometer-notification-agent" containerID="cri-o://bfe5dbae931004786a2a8148052c457a45ec6acc21921d30966615d35de3a65b" gracePeriod=30 Dec 06 08:31:49 crc kubenswrapper[4763]: I1206 08:31:49.529579 4763 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="proxy-httpd" containerID="cri-o://3b5d636734408a792ee7c67ff4b9636f2127f9e650577e576161174b7cff6c31" gracePeriod=30 Dec 06 08:31:49 crc kubenswrapper[4763]: W1206 08:31:49.688840 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9dd3ef40_c4d9_4fba_ae96_0ed0a747edd9.slice/crio-58bdd381d5b1bcc4c61d61d2904488b475718c101c4554e8727dc96e802f4316 WatchSource:0}: Error finding container 58bdd381d5b1bcc4c61d61d2904488b475718c101c4554e8727dc96e802f4316: Status 404 returned error can't find the container with id 58bdd381d5b1bcc4c61d61d2904488b475718c101c4554e8727dc96e802f4316 Dec 06 08:31:49 crc kubenswrapper[4763]: I1206 08:31:49.689673 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 06 08:31:49 crc kubenswrapper[4763]: I1206 08:31:49.734148 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c67602c-9d2e-44a3-a187-f51e90798ba3" path="/var/lib/kubelet/pods/4c67602c-9d2e-44a3-a187-f51e90798ba3/volumes" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.408157 4763 generic.go:334] "Generic (PLEG): container finished" podID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerID="3b5d636734408a792ee7c67ff4b9636f2127f9e650577e576161174b7cff6c31" exitCode=0 Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.408480 4763 generic.go:334] "Generic (PLEG): container finished" podID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerID="79ba783d9368d66361908c0ff5a6fd9aeaf1d4865b368239aecfd36fdc7d13ce" exitCode=2 Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.408490 4763 generic.go:334] "Generic (PLEG): container finished" podID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerID="bfe5dbae931004786a2a8148052c457a45ec6acc21921d30966615d35de3a65b" exitCode=0 Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.408539 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb","Type":"ContainerDied","Data":"3b5d636734408a792ee7c67ff4b9636f2127f9e650577e576161174b7cff6c31"} Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.408572 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb","Type":"ContainerDied","Data":"79ba783d9368d66361908c0ff5a6fd9aeaf1d4865b368239aecfd36fdc7d13ce"} Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.408585 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb","Type":"ContainerDied","Data":"bfe5dbae931004786a2a8148052c457a45ec6acc21921d30966615d35de3a65b"} Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.423784 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9","Type":"ContainerStarted","Data":"58bdd381d5b1bcc4c61d61d2904488b475718c101c4554e8727dc96e802f4316"} Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.602734 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.731763 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2584n\" (UniqueName: \"kubernetes.io/projected/8b17ba6f-6373-4c93-b07d-73a464deec1a-kube-api-access-2584n\") pod \"8b17ba6f-6373-4c93-b07d-73a464deec1a\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.731815 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-config-data\") pod \"8b17ba6f-6373-4c93-b07d-73a464deec1a\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.731942 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-internal-tls-certs\") pod \"8b17ba6f-6373-4c93-b07d-73a464deec1a\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.731972 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-logs\") pod \"8b17ba6f-6373-4c93-b07d-73a464deec1a\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.732667 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-logs" (OuterVolumeSpecName: "logs") pod "8b17ba6f-6373-4c93-b07d-73a464deec1a" (UID: "8b17ba6f-6373-4c93-b07d-73a464deec1a"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.732864 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-scripts\") pod \"8b17ba6f-6373-4c93-b07d-73a464deec1a\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.733275 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-httpd-run\") pod \"8b17ba6f-6373-4c93-b07d-73a464deec1a\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.733323 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"8b17ba6f-6373-4c93-b07d-73a464deec1a\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.733353 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-combined-ca-bundle\") pod \"8b17ba6f-6373-4c93-b07d-73a464deec1a\" (UID: \"8b17ba6f-6373-4c93-b07d-73a464deec1a\") " Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.734149 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.734686 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8b17ba6f-6373-4c93-b07d-73a464deec1a" (UID: "8b17ba6f-6373-4c93-b07d-73a464deec1a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.737795 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-scripts" (OuterVolumeSpecName: "scripts") pod "8b17ba6f-6373-4c93-b07d-73a464deec1a" (UID: "8b17ba6f-6373-4c93-b07d-73a464deec1a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.739701 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b17ba6f-6373-4c93-b07d-73a464deec1a-kube-api-access-2584n" (OuterVolumeSpecName: "kube-api-access-2584n") pod "8b17ba6f-6373-4c93-b07d-73a464deec1a" (UID: "8b17ba6f-6373-4c93-b07d-73a464deec1a"). InnerVolumeSpecName "kube-api-access-2584n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.741309 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "8b17ba6f-6373-4c93-b07d-73a464deec1a" (UID: "8b17ba6f-6373-4c93-b07d-73a464deec1a"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.794859 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b17ba6f-6373-4c93-b07d-73a464deec1a" (UID: "8b17ba6f-6373-4c93-b07d-73a464deec1a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.804517 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8b17ba6f-6373-4c93-b07d-73a464deec1a" (UID: "8b17ba6f-6373-4c93-b07d-73a464deec1a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.811082 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-config-data" (OuterVolumeSpecName: "config-data") pod "8b17ba6f-6373-4c93-b07d-73a464deec1a" (UID: "8b17ba6f-6373-4c93-b07d-73a464deec1a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.835711 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.835752 4763 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b17ba6f-6373-4c93-b07d-73a464deec1a-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.835775 4763 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.835788 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.835804 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2584n\" (UniqueName: \"kubernetes.io/projected/8b17ba6f-6373-4c93-b07d-73a464deec1a-kube-api-access-2584n\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.835814 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.835823 4763 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b17ba6f-6373-4c93-b07d-73a464deec1a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.864344 4763 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 06 08:31:50 crc kubenswrapper[4763]: I1206 08:31:50.937922 4763 reconciler_common.go:293] 
"Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.436999 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8b17ba6f-6373-4c93-b07d-73a464deec1a","Type":"ContainerDied","Data":"f0e3edde6d2aac968901206fffbce40f35180ade9bae5169c43bb2594a99565b"} Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.437020 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.437327 4763 scope.go:117] "RemoveContainer" containerID="2c87cac4c45b34d6ade9c2630d14bf3a69128aee8227bc164c65239c1de87e46" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.440854 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9","Type":"ContainerStarted","Data":"ca5359d244a9642534f3140a76b7c60392455ac0017f69074efb1b297ce4fa07"} Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.440893 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9","Type":"ContainerStarted","Data":"f1fc3145b02300cd759bf8ef100f183d152965d37c4c775cda2fa4c40c5f6af8"} Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.479226 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.479206671 podStartE2EDuration="3.479206671s" podCreationTimestamp="2025-12-06 08:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:31:51.47024432 +0000 UTC m=+1194.045949358" watchObservedRunningTime="2025-12-06 08:31:51.479206671 +0000 UTC m=+1194.054911709" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.485084 4763 scope.go:117] "RemoveContainer" containerID="23b9588da28ae21a6c7103f6638ff5ab0bc536c0da94d1411d3a43609732d58f" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.502854 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.518027 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.530617 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:31:51 crc kubenswrapper[4763]: E1206 08:31:51.531575 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b17ba6f-6373-4c93-b07d-73a464deec1a" containerName="glance-log" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.531674 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b17ba6f-6373-4c93-b07d-73a464deec1a" containerName="glance-log" Dec 06 08:31:51 crc kubenswrapper[4763]: E1206 08:31:51.531782 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b17ba6f-6373-4c93-b07d-73a464deec1a" containerName="glance-httpd" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.531837 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b17ba6f-6373-4c93-b07d-73a464deec1a" containerName="glance-httpd" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.532347 4763 
memory_manager.go:354] "RemoveStaleState removing state" podUID="8b17ba6f-6373-4c93-b07d-73a464deec1a" containerName="glance-httpd" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.532466 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b17ba6f-6373-4c93-b07d-73a464deec1a" containerName="glance-log" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.534471 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.537312 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.537749 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.563636 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.651585 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.651923 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-logs\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.652110 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.652376 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw8jd\" (UniqueName: \"kubernetes.io/projected/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-kube-api-access-bw8jd\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.652491 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.652562 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.652652 4763 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.652853 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.730986 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b17ba6f-6373-4c93-b07d-73a464deec1a" path="/var/lib/kubelet/pods/8b17ba6f-6373-4c93-b07d-73a464deec1a/volumes" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.754708 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.754827 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.754874 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.754916 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-logs\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.754953 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.754984 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw8jd\" (UniqueName: \"kubernetes.io/projected/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-kube-api-access-bw8jd\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.755026 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-combined-ca-bundle\") pod 
\"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.755043 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.755458 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.760132 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.760382 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.760507 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-logs\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.761079 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.764542 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.764608 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.791691 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw8jd\" (UniqueName: \"kubernetes.io/projected/a5315b7f-ca1a-48f1-92ad-30f8afcddf16-kube-api-access-bw8jd\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " 
pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.795133 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"a5315b7f-ca1a-48f1-92ad-30f8afcddf16\") " pod="openstack/glance-default-internal-api-0" Dec 06 08:31:51 crc kubenswrapper[4763]: I1206 08:31:51.870614 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 06 08:31:52 crc kubenswrapper[4763]: I1206 08:31:52.534852 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 06 08:31:53 crc kubenswrapper[4763]: I1206 08:31:53.473316 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a5315b7f-ca1a-48f1-92ad-30f8afcddf16","Type":"ContainerStarted","Data":"60372a56e03f4491bc50319929fbb5f53add9fb94bdd7c8562c24b8f60948be9"} Dec 06 08:31:53 crc kubenswrapper[4763]: I1206 08:31:53.473853 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a5315b7f-ca1a-48f1-92ad-30f8afcddf16","Type":"ContainerStarted","Data":"dfe3cb8a0a40077d6798ba8a0f0e8a652ac5998af3f25d7d388796c76dfe8929"} Dec 06 08:31:54 crc kubenswrapper[4763]: I1206 08:31:54.500612 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a5315b7f-ca1a-48f1-92ad-30f8afcddf16","Type":"ContainerStarted","Data":"a7175be7e1ca63994ea5d600a2f94d53dd881e956daf474db635189709bbbbb0"} Dec 06 08:31:54 crc kubenswrapper[4763]: I1206 08:31:54.535051 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.535028312 podStartE2EDuration="3.535028312s" podCreationTimestamp="2025-12-06 08:31:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:31:54.52495208 +0000 UTC m=+1197.100657118" watchObservedRunningTime="2025-12-06 08:31:54.535028312 +0000 UTC m=+1197.110733350" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.511411 4763 generic.go:334] "Generic (PLEG): container finished" podID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerID="c6ee4a5644273508d844ddb9513f23b938dc9de3adccd204f19a2deeade5ec7e" exitCode=0 Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.511485 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb","Type":"ContainerDied","Data":"c6ee4a5644273508d844ddb9513f23b938dc9de3adccd204f19a2deeade5ec7e"} Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.511526 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb","Type":"ContainerDied","Data":"83a35fcf68d2fb1aac1cfa3355836f7d80adf44c755b89c2e50b221c30dfd4ed"} Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.511535 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83a35fcf68d2fb1aac1cfa3355836f7d80adf44c755b89c2e50b221c30dfd4ed" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.523888 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.638232 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kn6s\" (UniqueName: \"kubernetes.io/projected/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-kube-api-access-8kn6s\") pod \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.638414 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-run-httpd\") pod \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.638450 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-config-data\") pod \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.638481 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-log-httpd\") pod \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.638528 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-combined-ca-bundle\") pod \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.638692 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-scripts\") pod \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.638755 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-sg-core-conf-yaml\") pod \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\" (UID: \"703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb\") " Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.639498 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" (UID: "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.643255 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" (UID: "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.645191 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-kube-api-access-8kn6s" (OuterVolumeSpecName: "kube-api-access-8kn6s") pod "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" (UID: "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb"). InnerVolumeSpecName "kube-api-access-8kn6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.672271 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-scripts" (OuterVolumeSpecName: "scripts") pod "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" (UID: "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.730164 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" (UID: "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.747853 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.747886 4763 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.747915 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kn6s\" (UniqueName: \"kubernetes.io/projected/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-kube-api-access-8kn6s\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.747928 4763 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.747939 4763 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.773186 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-config-data" (OuterVolumeSpecName: "config-data") pod "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" (UID: "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.780227 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" (UID: "703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.851669 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:55 crc kubenswrapper[4763]: I1206 08:31:55.851699 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.520246 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.556856 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.577744 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.591144 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:56 crc kubenswrapper[4763]: E1206 08:31:56.591572 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="ceilometer-notification-agent" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.591597 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="ceilometer-notification-agent" Dec 06 08:31:56 crc kubenswrapper[4763]: E1206 08:31:56.591619 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="sg-core" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.591628 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="sg-core" Dec 06 08:31:56 crc kubenswrapper[4763]: E1206 08:31:56.591647 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="ceilometer-central-agent" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.591655 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="ceilometer-central-agent" Dec 06 08:31:56 crc kubenswrapper[4763]: E1206 08:31:56.591682 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="proxy-httpd" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.591690 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="proxy-httpd" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.591960 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="sg-core" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.591982 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="ceilometer-notification-agent" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.591999 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="ceilometer-central-agent" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.592015 4763 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" containerName="proxy-httpd" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.596824 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.605455 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.605716 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.608674 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.666339 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-run-httpd\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.666420 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-scripts\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.666451 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-log-httpd\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.666474 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.666537 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-config-data\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.666565 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vw2n\" (UniqueName: \"kubernetes.io/projected/2273ae06-63bf-4068-ad08-0b7860c4bde9-kube-api-access-6vw2n\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.666601 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.767723 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-config-data\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.767763 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vw2n\" (UniqueName: \"kubernetes.io/projected/2273ae06-63bf-4068-ad08-0b7860c4bde9-kube-api-access-6vw2n\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.767820 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.767883 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-run-httpd\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.767943 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-scripts\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.767969 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-log-httpd\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.767990 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.768707 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-log-httpd\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.768767 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-run-httpd\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.773475 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.773655 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-scripts\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.773861 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-config-data\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.780725 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:56 crc kubenswrapper[4763]: I1206 08:31:56.799933 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vw2n\" (UniqueName: \"kubernetes.io/projected/2273ae06-63bf-4068-ad08-0b7860c4bde9-kube-api-access-6vw2n\") pod \"ceilometer-0\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " pod="openstack/ceilometer-0" Dec 06 08:31:57 crc kubenswrapper[4763]: I1206 08:31:57.011164 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:31:57 crc kubenswrapper[4763]: I1206 08:31:57.468325 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:31:57 crc kubenswrapper[4763]: I1206 08:31:57.537447 4763 generic.go:334] "Generic (PLEG): container finished" podID="a1d63988-0eda-49ae-b4ec-0cf81b1f9784" containerID="29ffe96153cb0d73b77807cd6b82344fc55acf089dc138c21c0ec7c68c43178f" exitCode=0 Dec 06 08:31:57 crc kubenswrapper[4763]: I1206 08:31:57.537560 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-7nmxm" event={"ID":"a1d63988-0eda-49ae-b4ec-0cf81b1f9784","Type":"ContainerDied","Data":"29ffe96153cb0d73b77807cd6b82344fc55acf089dc138c21c0ec7c68c43178f"} Dec 06 08:31:57 crc kubenswrapper[4763]: I1206 08:31:57.540158 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2273ae06-63bf-4068-ad08-0b7860c4bde9","Type":"ContainerStarted","Data":"c749a27cac4d5cbcb228896104dab85049572edbe3e86cad7a5e7dbc47325155"} Dec 06 08:31:57 crc kubenswrapper[4763]: I1206 08:31:57.727805 4763 scope.go:117] "RemoveContainer" containerID="dcc051956abdc610e17bef83d6aaf61ff0f8cab2599d0512d3bc72f70b0f9015" Dec 06 08:31:57 crc kubenswrapper[4763]: E1206 08:31:57.728227 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 40s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(7731d4cb-7569-4783-842d-acef9e33cb50)\"" pod="openstack/watcher-decision-engine-0" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" Dec 06 08:31:57 crc kubenswrapper[4763]: I1206 08:31:57.735347 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb" path="/var/lib/kubelet/pods/703c7bfb-9dd8-4acd-9a9f-a8efb81dbabb/volumes" Dec 06 08:31:58 crc kubenswrapper[4763]: I1206 08:31:58.553141 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"2273ae06-63bf-4068-ad08-0b7860c4bde9","Type":"ContainerStarted","Data":"fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3"} Dec 06 08:31:58 crc kubenswrapper[4763]: I1206 08:31:58.555057 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2273ae06-63bf-4068-ad08-0b7860c4bde9","Type":"ContainerStarted","Data":"6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e"} Dec 06 08:31:58 crc kubenswrapper[4763]: I1206 08:31:58.864198 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:58 crc kubenswrapper[4763]: I1206 08:31:58.917104 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-scripts\") pod \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " Dec 06 08:31:58 crc kubenswrapper[4763]: I1206 08:31:58.917369 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-config-data\") pod \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " Dec 06 08:31:58 crc kubenswrapper[4763]: I1206 08:31:58.917465 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-combined-ca-bundle\") pod \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " Dec 06 08:31:58 crc kubenswrapper[4763]: I1206 08:31:58.917531 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wsrw\" (UniqueName: \"kubernetes.io/projected/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-kube-api-access-9wsrw\") pod \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\" (UID: \"a1d63988-0eda-49ae-b4ec-0cf81b1f9784\") " Dec 06 08:31:58 crc kubenswrapper[4763]: I1206 08:31:58.926407 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-scripts" (OuterVolumeSpecName: "scripts") pod "a1d63988-0eda-49ae-b4ec-0cf81b1f9784" (UID: "a1d63988-0eda-49ae-b4ec-0cf81b1f9784"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:58 crc kubenswrapper[4763]: I1206 08:31:58.926615 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-kube-api-access-9wsrw" (OuterVolumeSpecName: "kube-api-access-9wsrw") pod "a1d63988-0eda-49ae-b4ec-0cf81b1f9784" (UID: "a1d63988-0eda-49ae-b4ec-0cf81b1f9784"). InnerVolumeSpecName "kube-api-access-9wsrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:31:58 crc kubenswrapper[4763]: I1206 08:31:58.944977 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1d63988-0eda-49ae-b4ec-0cf81b1f9784" (UID: "a1d63988-0eda-49ae-b4ec-0cf81b1f9784"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:58 crc kubenswrapper[4763]: I1206 08:31:58.957199 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-config-data" (OuterVolumeSpecName: "config-data") pod "a1d63988-0eda-49ae-b4ec-0cf81b1f9784" (UID: "a1d63988-0eda-49ae-b4ec-0cf81b1f9784"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.019721 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.019762 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.019772 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.019783 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wsrw\" (UniqueName: \"kubernetes.io/projected/a1d63988-0eda-49ae-b4ec-0cf81b1f9784-kube-api-access-9wsrw\") on node \"crc\" DevicePath \"\"" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.098981 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.099428 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.133187 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.142015 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.569706 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2273ae06-63bf-4068-ad08-0b7860c4bde9","Type":"ContainerStarted","Data":"f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f"} Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.572730 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-7nmxm" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.572754 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-7nmxm" event={"ID":"a1d63988-0eda-49ae-b4ec-0cf81b1f9784","Type":"ContainerDied","Data":"0e0cc10663e419d711d9ce65fb53e29ee81631eec020ae44cb3f8c04ae334240"} Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.572834 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e0cc10663e419d711d9ce65fb53e29ee81631eec020ae44cb3f8c04ae334240" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.573248 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.573282 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.658564 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 06 08:31:59 crc kubenswrapper[4763]: E1206 08:31:59.658996 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1d63988-0eda-49ae-b4ec-0cf81b1f9784" containerName="nova-cell0-conductor-db-sync" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.659018 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1d63988-0eda-49ae-b4ec-0cf81b1f9784" containerName="nova-cell0-conductor-db-sync" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.659212 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1d63988-0eda-49ae-b4ec-0cf81b1f9784" containerName="nova-cell0-conductor-db-sync" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.659845 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.662395 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-g6xwf" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.662617 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.669045 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.736099 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae8fb3b7-c521-443c-b9cc-6f821a32e8a1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ae8fb3b7-c521-443c-b9cc-6f821a32e8a1\") " pod="openstack/nova-cell0-conductor-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.736215 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae8fb3b7-c521-443c-b9cc-6f821a32e8a1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ae8fb3b7-c521-443c-b9cc-6f821a32e8a1\") " pod="openstack/nova-cell0-conductor-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.736270 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7djg\" (UniqueName: \"kubernetes.io/projected/ae8fb3b7-c521-443c-b9cc-6f821a32e8a1-kube-api-access-m7djg\") pod \"nova-cell0-conductor-0\" (UID: \"ae8fb3b7-c521-443c-b9cc-6f821a32e8a1\") " pod="openstack/nova-cell0-conductor-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.838702 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae8fb3b7-c521-443c-b9cc-6f821a32e8a1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ae8fb3b7-c521-443c-b9cc-6f821a32e8a1\") " pod="openstack/nova-cell0-conductor-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.838775 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae8fb3b7-c521-443c-b9cc-6f821a32e8a1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ae8fb3b7-c521-443c-b9cc-6f821a32e8a1\") " pod="openstack/nova-cell0-conductor-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.838811 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7djg\" (UniqueName: \"kubernetes.io/projected/ae8fb3b7-c521-443c-b9cc-6f821a32e8a1-kube-api-access-m7djg\") pod \"nova-cell0-conductor-0\" (UID: \"ae8fb3b7-c521-443c-b9cc-6f821a32e8a1\") " pod="openstack/nova-cell0-conductor-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.845384 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae8fb3b7-c521-443c-b9cc-6f821a32e8a1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ae8fb3b7-c521-443c-b9cc-6f821a32e8a1\") " pod="openstack/nova-cell0-conductor-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.845469 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae8fb3b7-c521-443c-b9cc-6f821a32e8a1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"ae8fb3b7-c521-443c-b9cc-6f821a32e8a1\") " pod="openstack/nova-cell0-conductor-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.861878 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7djg\" (UniqueName: \"kubernetes.io/projected/ae8fb3b7-c521-443c-b9cc-6f821a32e8a1-kube-api-access-m7djg\") pod \"nova-cell0-conductor-0\" (UID: \"ae8fb3b7-c521-443c-b9cc-6f821a32e8a1\") " pod="openstack/nova-cell0-conductor-0" Dec 06 08:31:59 crc kubenswrapper[4763]: I1206 08:31:59.991680 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 06 08:32:00 crc kubenswrapper[4763]: W1206 08:32:00.444486 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae8fb3b7_c521_443c_b9cc_6f821a32e8a1.slice/crio-831ebcf1dbe9222b7516c48a6d6c5f2872d06483f89307c5acc633a020569722 WatchSource:0}: Error finding container 831ebcf1dbe9222b7516c48a6d6c5f2872d06483f89307c5acc633a020569722: Status 404 returned error can't find the container with id 831ebcf1dbe9222b7516c48a6d6c5f2872d06483f89307c5acc633a020569722 Dec 06 08:32:00 crc kubenswrapper[4763]: I1206 08:32:00.447453 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 06 08:32:00 crc kubenswrapper[4763]: I1206 08:32:00.585699 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2273ae06-63bf-4068-ad08-0b7860c4bde9","Type":"ContainerStarted","Data":"3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875"} Dec 06 08:32:00 crc kubenswrapper[4763]: I1206 08:32:00.586179 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 06 08:32:00 crc kubenswrapper[4763]: I1206 08:32:00.587478 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ae8fb3b7-c521-443c-b9cc-6f821a32e8a1","Type":"ContainerStarted","Data":"831ebcf1dbe9222b7516c48a6d6c5f2872d06483f89307c5acc633a020569722"} Dec 06 08:32:00 crc kubenswrapper[4763]: I1206 08:32:00.617790 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.107346407 podStartE2EDuration="4.617764073s" podCreationTimestamp="2025-12-06 08:31:56 +0000 UTC" firstStartedPulling="2025-12-06 08:31:57.478631548 +0000 UTC m=+1200.054336586" lastFinishedPulling="2025-12-06 08:31:59.989049214 +0000 UTC m=+1202.564754252" observedRunningTime="2025-12-06 08:32:00.606747836 +0000 UTC m=+1203.182452874" watchObservedRunningTime="2025-12-06 08:32:00.617764073 +0000 UTC m=+1203.193469121" Dec 06 08:32:01 crc kubenswrapper[4763]: I1206 08:32:01.544414 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 06 08:32:01 crc kubenswrapper[4763]: I1206 08:32:01.559123 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 06 08:32:01 crc kubenswrapper[4763]: I1206 08:32:01.619503 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ae8fb3b7-c521-443c-b9cc-6f821a32e8a1","Type":"ContainerStarted","Data":"8f914090b9d4bd950a334caa67b3c16411416461106e8a0847898f5ff1f5a9ed"} Dec 06 08:32:01 crc kubenswrapper[4763]: I1206 08:32:01.621010 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-cell0-conductor-0" Dec 06 08:32:01 crc kubenswrapper[4763]: I1206 08:32:01.648431 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.64838586 podStartE2EDuration="2.64838586s" podCreationTimestamp="2025-12-06 08:31:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:01.635709859 +0000 UTC m=+1204.211414897" watchObservedRunningTime="2025-12-06 08:32:01.64838586 +0000 UTC m=+1204.224090898" Dec 06 08:32:01 crc kubenswrapper[4763]: I1206 08:32:01.872526 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 06 08:32:01 crc kubenswrapper[4763]: I1206 08:32:01.872602 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 06 08:32:01 crc kubenswrapper[4763]: I1206 08:32:01.917934 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 06 08:32:01 crc kubenswrapper[4763]: I1206 08:32:01.931673 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 06 08:32:02 crc kubenswrapper[4763]: I1206 08:32:02.628261 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 06 08:32:02 crc kubenswrapper[4763]: I1206 08:32:02.628320 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 06 08:32:04 crc kubenswrapper[4763]: I1206 08:32:04.538296 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 06 08:32:04 crc kubenswrapper[4763]: I1206 08:32:04.544340 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.018423 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.441025 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-b47q2"] Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.442458 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.446329 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.446385 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.505666 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-b47q2"] Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.556728 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.556804 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvtxf\" (UniqueName: \"kubernetes.io/projected/9dace35f-53fc-43b5-a8c7-d58ba87f496b-kube-api-access-dvtxf\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.556943 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-config-data\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.557027 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-scripts\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.658687 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-scripts\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.658785 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.658812 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvtxf\" (UniqueName: \"kubernetes.io/projected/9dace35f-53fc-43b5-a8c7-d58ba87f496b-kube-api-access-dvtxf\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.658889 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-config-data\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.660694 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.662268 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.664657 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-scripts\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.674582 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.674702 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-config-data\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.677276 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.693667 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.724031 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvtxf\" (UniqueName: \"kubernetes.io/projected/9dace35f-53fc-43b5-a8c7-d58ba87f496b-kube-api-access-dvtxf\") pod \"nova-cell0-cell-mapping-b47q2\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.761196 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-config-data\") pod \"nova-scheduler-0\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.761571 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.761591 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rznpr\" (UniqueName: \"kubernetes.io/projected/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-kube-api-access-rznpr\") pod \"nova-scheduler-0\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " pod="openstack/nova-scheduler-0" 
Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.764345 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.775412 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.777138 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.791422 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.822292 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.871217 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-config-data\") pod \"nova-scheduler-0\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.871296 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-config-data\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.871428 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.871445 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rznpr\" (UniqueName: \"kubernetes.io/projected/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-kube-api-access-rznpr\") pod \"nova-scheduler-0\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.871463 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42rfk\" (UniqueName: \"kubernetes.io/projected/4dd89a9a-687e-478b-9d96-67252b402c60-kube-api-access-42rfk\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.871515 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.871547 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4dd89a9a-687e-478b-9d96-67252b402c60-logs\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.894844 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.905587 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-config-data\") pod \"nova-scheduler-0\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.913211 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b7fb879d9-9gtrm"] Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.914965 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.933810 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rznpr\" (UniqueName: \"kubernetes.io/projected/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-kube-api-access-rznpr\") pod \"nova-scheduler-0\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.935340 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.955859 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b7fb879d9-9gtrm"] Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.974393 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-config\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.974445 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-svc\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.974468 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-config-data\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.974544 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc6p6\" (UniqueName: \"kubernetes.io/projected/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-kube-api-access-fc6p6\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.974570 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-sb\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: 
\"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.974595 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42rfk\" (UniqueName: \"kubernetes.io/projected/4dd89a9a-687e-478b-9d96-67252b402c60-kube-api-access-42rfk\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.974629 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-swift-storage-0\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.974656 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.974674 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4dd89a9a-687e-478b-9d96-67252b402c60-logs\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.974708 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-nb\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.975783 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4dd89a9a-687e-478b-9d96-67252b402c60-logs\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.982473 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:10 crc kubenswrapper[4763]: I1206 08:32:10.997413 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.000467 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.006515 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.015401 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.025285 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42rfk\" (UniqueName: \"kubernetes.io/projected/4dd89a9a-687e-478b-9d96-67252b402c60-kube-api-access-42rfk\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.033665 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-config-data\") pod \"nova-metadata-0\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " pod="openstack/nova-metadata-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.077272 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-config\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.077326 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-svc\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.077404 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc6p6\" (UniqueName: \"kubernetes.io/projected/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-kube-api-access-fc6p6\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.077426 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-sb\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.077480 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.077520 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-swift-storage-0\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.077565 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-config-data\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.077590 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mr9x\" (UniqueName: \"kubernetes.io/projected/e1253aae-34a4-49c8-9beb-e94fc89c4322-kube-api-access-5mr9x\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.077615 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-nb\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.077645 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1253aae-34a4-49c8-9beb-e94fc89c4322-logs\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.078742 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-config\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.079311 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-swift-storage-0\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.079890 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-svc\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.080662 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-sb\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.086447 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-nb\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.105412 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.106738 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.112234 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.113214 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc6p6\" (UniqueName: \"kubernetes.io/projected/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-kube-api-access-fc6p6\") pod \"dnsmasq-dns-7b7fb879d9-9gtrm\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.170797 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.181392 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.181468 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.181498 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f72lv\" (UniqueName: \"kubernetes.io/projected/379b90db-1a4a-49ca-90bc-57701808262d-kube-api-access-f72lv\") pod \"nova-cell1-novncproxy-0\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.181558 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-config-data\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.181593 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mr9x\" (UniqueName: \"kubernetes.io/projected/e1253aae-34a4-49c8-9beb-e94fc89c4322-kube-api-access-5mr9x\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.181635 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1253aae-34a4-49c8-9beb-e94fc89c4322-logs\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.181686 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.183439 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/e1253aae-34a4-49c8-9beb-e94fc89c4322-logs\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.191402 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.191560 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-config-data\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.229738 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mr9x\" (UniqueName: \"kubernetes.io/projected/e1253aae-34a4-49c8-9beb-e94fc89c4322-kube-api-access-5mr9x\") pod \"nova-api-0\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.273856 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.283734 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.283813 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f72lv\" (UniqueName: \"kubernetes.io/projected/379b90db-1a4a-49ca-90bc-57701808262d-kube-api-access-f72lv\") pod \"nova-cell1-novncproxy-0\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.283933 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.288131 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.292056 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.308461 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.317651 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f72lv\" (UniqueName: \"kubernetes.io/projected/379b90db-1a4a-49ca-90bc-57701808262d-kube-api-access-f72lv\") pod \"nova-cell1-novncproxy-0\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.334573 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.433751 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.510821 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-b47q2"] Dec 06 08:32:11 crc kubenswrapper[4763]: W1206 08:32:11.591864 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9dace35f_53fc_43b5_a8c7_d58ba87f496b.slice/crio-ea151a44e4f8c1b7cec05101444b9d110976f869e8ad1b9d54a5d1ce6b2fa561 WatchSource:0}: Error finding container ea151a44e4f8c1b7cec05101444b9d110976f869e8ad1b9d54a5d1ce6b2fa561: Status 404 returned error can't find the container with id ea151a44e4f8c1b7cec05101444b9d110976f869e8ad1b9d54a5d1ce6b2fa561 Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.635102 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:32:11 crc kubenswrapper[4763]: W1206 08:32:11.644791 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddeb0ba5a_81b5_4dad_9a22_dc38cf94738c.slice/crio-a3658cfb06ec58f609b1b0c94b6310a376286be69762c1a5258960e82efb25ec WatchSource:0}: Error finding container a3658cfb06ec58f609b1b0c94b6310a376286be69762c1a5258960e82efb25ec: Status 404 returned error can't find the container with id a3658cfb06ec58f609b1b0c94b6310a376286be69762c1a5258960e82efb25ec Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.721155 4763 scope.go:117] "RemoveContainer" containerID="dcc051956abdc610e17bef83d6aaf61ff0f8cab2599d0512d3bc72f70b0f9015" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.736448 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-b47q2" event={"ID":"9dace35f-53fc-43b5-a8c7-d58ba87f496b","Type":"ContainerStarted","Data":"ea151a44e4f8c1b7cec05101444b9d110976f869e8ad1b9d54a5d1ce6b2fa561"} Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.736486 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"deb0ba5a-81b5-4dad-9a22-dc38cf94738c","Type":"ContainerStarted","Data":"a3658cfb06ec58f609b1b0c94b6310a376286be69762c1a5258960e82efb25ec"} Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.786646 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-j9gvv"] Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.788048 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.794749 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.795081 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.799826 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-j9gvv"] Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.901408 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.901493 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-config-data\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.901571 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bncl\" (UniqueName: \"kubernetes.io/projected/40742493-d161-4853-89e0-f2841a3ea6d7-kube-api-access-2bncl\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.901610 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-scripts\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:11 crc kubenswrapper[4763]: I1206 08:32:11.939181 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:11 crc kubenswrapper[4763]: W1206 08:32:11.958023 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1253aae_34a4_49c8_9beb_e94fc89c4322.slice/crio-a9e6b191b06f79a074eb2538eb0c7f09705b9b31548bdffa19eaf68bae1f52da WatchSource:0}: Error finding container a9e6b191b06f79a074eb2538eb0c7f09705b9b31548bdffa19eaf68bae1f52da: Status 404 returned error can't find the container with id a9e6b191b06f79a074eb2538eb0c7f09705b9b31548bdffa19eaf68bae1f52da Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.004425 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.004612 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-config-data\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.004832 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bncl\" (UniqueName: \"kubernetes.io/projected/40742493-d161-4853-89e0-f2841a3ea6d7-kube-api-access-2bncl\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.005659 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-scripts\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.011510 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-scripts\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.014103 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-config-data\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.025288 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.031512 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bncl\" (UniqueName: \"kubernetes.io/projected/40742493-d161-4853-89e0-f2841a3ea6d7-kube-api-access-2bncl\") pod \"nova-cell1-conductor-db-sync-j9gvv\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.078732 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b7fb879d9-9gtrm"] Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.088684 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.187224 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.227436 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 06 08:32:12 crc kubenswrapper[4763]: W1206 08:32:12.241458 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod379b90db_1a4a_49ca_90bc_57701808262d.slice/crio-1fb1de741ff3e96347bf57ebc5b60b94866a822c0ffb6bcd772186b27cd6853e WatchSource:0}: Error finding container 1fb1de741ff3e96347bf57ebc5b60b94866a822c0ffb6bcd772186b27cd6853e: Status 404 returned error can't find the container with id 1fb1de741ff3e96347bf57ebc5b60b94866a822c0ffb6bcd772186b27cd6853e Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.710790 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-j9gvv"] Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.760361 4763 generic.go:334] "Generic (PLEG): container finished" podID="2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" containerID="5942b76ccdc6d26f7c14bb1e6f052fe51d80f7094f8e26e26d2ab08868706bf2" exitCode=0 Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.760630 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" event={"ID":"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2","Type":"ContainerDied","Data":"5942b76ccdc6d26f7c14bb1e6f052fe51d80f7094f8e26e26d2ab08868706bf2"} Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.760701 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" event={"ID":"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2","Type":"ContainerStarted","Data":"bd4ba51d25b39fbb0a1415a3879f8c90044d74bd67d2ee1f8b8289adf9d6bcb7"} Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.767181 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e1253aae-34a4-49c8-9beb-e94fc89c4322","Type":"ContainerStarted","Data":"a9e6b191b06f79a074eb2538eb0c7f09705b9b31548bdffa19eaf68bae1f52da"} Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.773591 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerStarted","Data":"b5689e6009818d0fbc3318c082b970bbdb6c0131b8b49c26f72354c54b59b2d1"} Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.794174 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4dd89a9a-687e-478b-9d96-67252b402c60","Type":"ContainerStarted","Data":"0061c2ecdd95e046acf807e6f8e7b0206e9e85206547ddca780c57afdc276c9c"} Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.820441 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"379b90db-1a4a-49ca-90bc-57701808262d","Type":"ContainerStarted","Data":"1fb1de741ff3e96347bf57ebc5b60b94866a822c0ffb6bcd772186b27cd6853e"} Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.822838 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-b47q2" event={"ID":"9dace35f-53fc-43b5-a8c7-d58ba87f496b","Type":"ContainerStarted","Data":"e4b6c360a2a90838f5017bd14dbbbb0bbdb8fd3b2e7926caae7bafc0d6523d57"} Dec 06 08:32:12 crc kubenswrapper[4763]: I1206 08:32:12.845243 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-b47q2" 
podStartSLOduration=2.8451812260000002 podStartE2EDuration="2.845181226s" podCreationTimestamp="2025-12-06 08:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:12.838809895 +0000 UTC m=+1215.414514943" watchObservedRunningTime="2025-12-06 08:32:12.845181226 +0000 UTC m=+1215.420886264" Dec 06 08:32:13 crc kubenswrapper[4763]: I1206 08:32:13.846677 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-j9gvv" event={"ID":"40742493-d161-4853-89e0-f2841a3ea6d7","Type":"ContainerStarted","Data":"71746815d2277f36e12e35964ba609e6d8e92c6c2083fbcc299ef173faf590aa"} Dec 06 08:32:14 crc kubenswrapper[4763]: I1206 08:32:14.336561 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 06 08:32:14 crc kubenswrapper[4763]: I1206 08:32:14.407531 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.889730 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"deb0ba5a-81b5-4dad-9a22-dc38cf94738c","Type":"ContainerStarted","Data":"07b86cc8bbbf8c47d83bda2a73aca0677bc9ce8ee67ad8c614a6d12eec381c1a"} Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.899834 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" event={"ID":"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2","Type":"ContainerStarted","Data":"f0712d5b17eff9cfa0b28eee4e4d3b4fde40c63ae551fa97facc50762002bb36"} Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.900725 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.905649 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e1253aae-34a4-49c8-9beb-e94fc89c4322","Type":"ContainerStarted","Data":"5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850"} Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.907781 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-j9gvv" event={"ID":"40742493-d161-4853-89e0-f2841a3ea6d7","Type":"ContainerStarted","Data":"a511531cbcddccb6b2e61346bfd21c4943905b42915613641facfe34c4bb6ae1"} Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.916247 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4dd89a9a-687e-478b-9d96-67252b402c60","Type":"ContainerStarted","Data":"2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0"} Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.918621 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"379b90db-1a4a-49ca-90bc-57701808262d","Type":"ContainerStarted","Data":"db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037"} Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.918748 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="379b90db-1a4a-49ca-90bc-57701808262d" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037" gracePeriod=30 Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.924265 4763 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.172703437 podStartE2EDuration="5.924243212s" podCreationTimestamp="2025-12-06 08:32:10 +0000 UTC" firstStartedPulling="2025-12-06 08:32:11.646562123 +0000 UTC m=+1214.222267161" lastFinishedPulling="2025-12-06 08:32:15.398101898 +0000 UTC m=+1217.973806936" observedRunningTime="2025-12-06 08:32:15.91224944 +0000 UTC m=+1218.487954488" watchObservedRunningTime="2025-12-06 08:32:15.924243212 +0000 UTC m=+1218.499948250" Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.935951 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.937059 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" podStartSLOduration=5.937042067 podStartE2EDuration="5.937042067s" podCreationTimestamp="2025-12-06 08:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:15.935765494 +0000 UTC m=+1218.511470542" watchObservedRunningTime="2025-12-06 08:32:15.937042067 +0000 UTC m=+1218.512747105" Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.958213 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-j9gvv" podStartSLOduration=4.958192918 podStartE2EDuration="4.958192918s" podCreationTimestamp="2025-12-06 08:32:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:15.955352861 +0000 UTC m=+1218.531057899" watchObservedRunningTime="2025-12-06 08:32:15.958192918 +0000 UTC m=+1218.533897956" Dec 06 08:32:15 crc kubenswrapper[4763]: I1206 08:32:15.982518 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.830227763 podStartE2EDuration="5.982499032s" podCreationTimestamp="2025-12-06 08:32:10 +0000 UTC" firstStartedPulling="2025-12-06 08:32:12.246586169 +0000 UTC m=+1214.822291207" lastFinishedPulling="2025-12-06 08:32:15.398857438 +0000 UTC m=+1217.974562476" observedRunningTime="2025-12-06 08:32:15.970342735 +0000 UTC m=+1218.546047783" watchObservedRunningTime="2025-12-06 08:32:15.982499032 +0000 UTC m=+1218.558204070" Dec 06 08:32:16 crc kubenswrapper[4763]: I1206 08:32:16.434521 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:16 crc kubenswrapper[4763]: I1206 08:32:16.528116 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:16 crc kubenswrapper[4763]: I1206 08:32:16.528164 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:16 crc kubenswrapper[4763]: I1206 08:32:16.555505 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:16 crc kubenswrapper[4763]: I1206 08:32:16.928434 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e1253aae-34a4-49c8-9beb-e94fc89c4322","Type":"ContainerStarted","Data":"d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d"} Dec 06 08:32:16 crc kubenswrapper[4763]: I1206 08:32:16.932006 4763 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/nova-metadata-0" podUID="4dd89a9a-687e-478b-9d96-67252b402c60" containerName="nova-metadata-log" containerID="cri-o://2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0" gracePeriod=30 Dec 06 08:32:16 crc kubenswrapper[4763]: I1206 08:32:16.932261 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4dd89a9a-687e-478b-9d96-67252b402c60","Type":"ContainerStarted","Data":"574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72"} Dec 06 08:32:16 crc kubenswrapper[4763]: I1206 08:32:16.934089 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4dd89a9a-687e-478b-9d96-67252b402c60" containerName="nova-metadata-metadata" containerID="cri-o://574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72" gracePeriod=30 Dec 06 08:32:16 crc kubenswrapper[4763]: I1206 08:32:16.972658 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.551161908 podStartE2EDuration="6.972639569s" podCreationTimestamp="2025-12-06 08:32:10 +0000 UTC" firstStartedPulling="2025-12-06 08:32:11.978958999 +0000 UTC m=+1214.554664047" lastFinishedPulling="2025-12-06 08:32:15.40043667 +0000 UTC m=+1217.976141708" observedRunningTime="2025-12-06 08:32:16.961664413 +0000 UTC m=+1219.537369461" watchObservedRunningTime="2025-12-06 08:32:16.972639569 +0000 UTC m=+1219.548344607" Dec 06 08:32:16 crc kubenswrapper[4763]: I1206 08:32:16.977156 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:16 crc kubenswrapper[4763]: I1206 08:32:16.987207 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.673661797 podStartE2EDuration="6.987187951s" podCreationTimestamp="2025-12-06 08:32:10 +0000 UTC" firstStartedPulling="2025-12-06 08:32:12.084558063 +0000 UTC m=+1214.660263101" lastFinishedPulling="2025-12-06 08:32:15.398084217 +0000 UTC m=+1217.973789255" observedRunningTime="2025-12-06 08:32:16.978431665 +0000 UTC m=+1219.554136703" watchObservedRunningTime="2025-12-06 08:32:16.987187951 +0000 UTC m=+1219.562892989" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.050472 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.549990 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.739220 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-config-data\") pod \"4dd89a9a-687e-478b-9d96-67252b402c60\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.739274 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-combined-ca-bundle\") pod \"4dd89a9a-687e-478b-9d96-67252b402c60\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.739353 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4dd89a9a-687e-478b-9d96-67252b402c60-logs\") pod \"4dd89a9a-687e-478b-9d96-67252b402c60\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.739439 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42rfk\" (UniqueName: \"kubernetes.io/projected/4dd89a9a-687e-478b-9d96-67252b402c60-kube-api-access-42rfk\") pod \"4dd89a9a-687e-478b-9d96-67252b402c60\" (UID: \"4dd89a9a-687e-478b-9d96-67252b402c60\") " Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.739999 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dd89a9a-687e-478b-9d96-67252b402c60-logs" (OuterVolumeSpecName: "logs") pod "4dd89a9a-687e-478b-9d96-67252b402c60" (UID: "4dd89a9a-687e-478b-9d96-67252b402c60"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.744922 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dd89a9a-687e-478b-9d96-67252b402c60-kube-api-access-42rfk" (OuterVolumeSpecName: "kube-api-access-42rfk") pod "4dd89a9a-687e-478b-9d96-67252b402c60" (UID: "4dd89a9a-687e-478b-9d96-67252b402c60"). InnerVolumeSpecName "kube-api-access-42rfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.773586 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4dd89a9a-687e-478b-9d96-67252b402c60" (UID: "4dd89a9a-687e-478b-9d96-67252b402c60"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.775420 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-config-data" (OuterVolumeSpecName: "config-data") pod "4dd89a9a-687e-478b-9d96-67252b402c60" (UID: "4dd89a9a-687e-478b-9d96-67252b402c60"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.842631 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4dd89a9a-687e-478b-9d96-67252b402c60-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.842884 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42rfk\" (UniqueName: \"kubernetes.io/projected/4dd89a9a-687e-478b-9d96-67252b402c60-kube-api-access-42rfk\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.842997 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.843015 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dd89a9a-687e-478b-9d96-67252b402c60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.943279 4763 generic.go:334] "Generic (PLEG): container finished" podID="4dd89a9a-687e-478b-9d96-67252b402c60" containerID="574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72" exitCode=0 Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.943581 4763 generic.go:334] "Generic (PLEG): container finished" podID="4dd89a9a-687e-478b-9d96-67252b402c60" containerID="2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0" exitCode=143 Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.943343 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.943363 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4dd89a9a-687e-478b-9d96-67252b402c60","Type":"ContainerDied","Data":"574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72"} Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.943788 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4dd89a9a-687e-478b-9d96-67252b402c60","Type":"ContainerDied","Data":"2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0"} Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.943806 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4dd89a9a-687e-478b-9d96-67252b402c60","Type":"ContainerDied","Data":"0061c2ecdd95e046acf807e6f8e7b0206e9e85206547ddca780c57afdc276c9c"} Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.943823 4763 scope.go:117] "RemoveContainer" containerID="574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.968581 4763 scope.go:117] "RemoveContainer" containerID="2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.988931 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.995827 4763 scope.go:117] "RemoveContainer" containerID="574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72" Dec 06 08:32:17 crc kubenswrapper[4763]: E1206 08:32:17.998812 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72\": container with ID starting with 574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72 not found: ID does not exist" containerID="574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.998852 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72"} err="failed to get container status \"574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72\": rpc error: code = NotFound desc = could not find container \"574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72\": container with ID starting with 574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72 not found: ID does not exist" Dec 06 08:32:17 crc kubenswrapper[4763]: I1206 08:32:17.998879 4763 scope.go:117] "RemoveContainer" containerID="2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0" Dec 06 08:32:18 crc kubenswrapper[4763]: E1206 08:32:18.002360 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0\": container with ID starting with 2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0 not found: ID does not exist" containerID="2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.002400 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0"} err="failed to get container status \"2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0\": rpc error: code = NotFound desc = could not find container \"2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0\": container with ID starting with 2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0 not found: ID does not exist" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.002425 4763 scope.go:117] "RemoveContainer" containerID="574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.004250 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72"} err="failed to get container status \"574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72\": rpc error: code = NotFound desc = could not find container \"574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72\": container with ID starting with 574f1885328446fe4e44240ad2216f9b9786bd3b65ce0fb2ba435a031a334e72 not found: ID does not exist" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.004275 4763 scope.go:117] "RemoveContainer" containerID="2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.006133 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.006868 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0"} err="failed to get container status 
\"2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0\": rpc error: code = NotFound desc = could not find container \"2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0\": container with ID starting with 2f6a0c338884f39e216f5be3562aea03320c98cf6f80dd0324386722eb817eb0 not found: ID does not exist" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.017964 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:18 crc kubenswrapper[4763]: E1206 08:32:18.018528 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dd89a9a-687e-478b-9d96-67252b402c60" containerName="nova-metadata-log" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.018542 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dd89a9a-687e-478b-9d96-67252b402c60" containerName="nova-metadata-log" Dec 06 08:32:18 crc kubenswrapper[4763]: E1206 08:32:18.018558 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dd89a9a-687e-478b-9d96-67252b402c60" containerName="nova-metadata-metadata" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.018567 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dd89a9a-687e-478b-9d96-67252b402c60" containerName="nova-metadata-metadata" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.018816 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dd89a9a-687e-478b-9d96-67252b402c60" containerName="nova-metadata-metadata" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.018845 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dd89a9a-687e-478b-9d96-67252b402c60" containerName="nova-metadata-log" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.020265 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.021981 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.025138 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.052244 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.158871 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cf24aa4-2a38-4540-96ba-795bca5a17da-logs\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.159226 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.160016 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.160425 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsb7n\" (UniqueName: \"kubernetes.io/projected/6cf24aa4-2a38-4540-96ba-795bca5a17da-kube-api-access-rsb7n\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.160502 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-config-data\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.262168 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsb7n\" (UniqueName: \"kubernetes.io/projected/6cf24aa4-2a38-4540-96ba-795bca5a17da-kube-api-access-rsb7n\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.262238 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-config-data\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.262305 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " 
pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.262331 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cf24aa4-2a38-4540-96ba-795bca5a17da-logs\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.262484 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.263023 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cf24aa4-2a38-4540-96ba-795bca5a17da-logs\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.267742 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.268389 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-config-data\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.270570 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.285578 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsb7n\" (UniqueName: \"kubernetes.io/projected/6cf24aa4-2a38-4540-96ba-795bca5a17da-kube-api-access-rsb7n\") pod \"nova-metadata-0\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.402378 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:32:18 crc kubenswrapper[4763]: W1206 08:32:18.899111 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cf24aa4_2a38_4540_96ba_795bca5a17da.slice/crio-7df8c9b140856280d0716724bd3b1b72801caef32e7245f88913ec9303b09692 WatchSource:0}: Error finding container 7df8c9b140856280d0716724bd3b1b72801caef32e7245f88913ec9303b09692: Status 404 returned error can't find the container with id 7df8c9b140856280d0716724bd3b1b72801caef32e7245f88913ec9303b09692 Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.902259 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.984370 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6cf24aa4-2a38-4540-96ba-795bca5a17da","Type":"ContainerStarted","Data":"7df8c9b140856280d0716724bd3b1b72801caef32e7245f88913ec9303b09692"} Dec 06 08:32:18 crc kubenswrapper[4763]: I1206 08:32:18.986029 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-decision-engine-0" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" containerID="cri-o://b5689e6009818d0fbc3318c082b970bbdb6c0131b8b49c26f72354c54b59b2d1" gracePeriod=30 Dec 06 08:32:19 crc kubenswrapper[4763]: I1206 08:32:19.731626 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dd89a9a-687e-478b-9d96-67252b402c60" path="/var/lib/kubelet/pods/4dd89a9a-687e-478b-9d96-67252b402c60/volumes" Dec 06 08:32:20 crc kubenswrapper[4763]: I1206 08:32:20.000524 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6cf24aa4-2a38-4540-96ba-795bca5a17da","Type":"ContainerStarted","Data":"4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3"} Dec 06 08:32:20 crc kubenswrapper[4763]: I1206 08:32:20.000590 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6cf24aa4-2a38-4540-96ba-795bca5a17da","Type":"ContainerStarted","Data":"8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801"} Dec 06 08:32:20 crc kubenswrapper[4763]: I1206 08:32:20.023887 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.023854345 podStartE2EDuration="3.023854345s" podCreationTimestamp="2025-12-06 08:32:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:20.021543353 +0000 UTC m=+1222.597248481" watchObservedRunningTime="2025-12-06 08:32:20.023854345 +0000 UTC m=+1222.599559423" Dec 06 08:32:20 crc kubenswrapper[4763]: I1206 08:32:20.936866 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 06 08:32:20 crc kubenswrapper[4763]: I1206 08:32:20.989784 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 06 08:32:21 crc kubenswrapper[4763]: I1206 08:32:21.023762 4763 generic.go:334] "Generic (PLEG): container finished" podID="9dace35f-53fc-43b5-a8c7-d58ba87f496b" containerID="e4b6c360a2a90838f5017bd14dbbbb0bbdb8fd3b2e7926caae7bafc0d6523d57" exitCode=0 Dec 06 08:32:21 crc kubenswrapper[4763]: I1206 08:32:21.024486 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-cell-mapping-b47q2" event={"ID":"9dace35f-53fc-43b5-a8c7-d58ba87f496b","Type":"ContainerDied","Data":"e4b6c360a2a90838f5017bd14dbbbb0bbdb8fd3b2e7926caae7bafc0d6523d57"} Dec 06 08:32:21 crc kubenswrapper[4763]: I1206 08:32:21.058869 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 06 08:32:21 crc kubenswrapper[4763]: I1206 08:32:21.310293 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:32:21 crc kubenswrapper[4763]: I1206 08:32:21.336733 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 06 08:32:21 crc kubenswrapper[4763]: I1206 08:32:21.336789 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 06 08:32:21 crc kubenswrapper[4763]: I1206 08:32:21.384643 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84c44f58df-s6697"] Dec 06 08:32:21 crc kubenswrapper[4763]: I1206 08:32:21.384888 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84c44f58df-s6697" podUID="4c5b406a-e0c3-4856-a55a-58d3403994cc" containerName="dnsmasq-dns" containerID="cri-o://45165ad41c822d9525863f060df2a2eba8ae327f7248300d8f6bcc59b3801870" gracePeriod=10 Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.053430 4763 generic.go:334] "Generic (PLEG): container finished" podID="7731d4cb-7569-4783-842d-acef9e33cb50" containerID="b5689e6009818d0fbc3318c082b970bbdb6c0131b8b49c26f72354c54b59b2d1" exitCode=0 Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.053920 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerDied","Data":"b5689e6009818d0fbc3318c082b970bbdb6c0131b8b49c26f72354c54b59b2d1"} Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.053972 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"7731d4cb-7569-4783-842d-acef9e33cb50","Type":"ContainerDied","Data":"320085ec70a49cc4b2976aaa26e4764cf735058e7c0da9457716df82a268330f"} Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.053990 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="320085ec70a49cc4b2976aaa26e4764cf735058e7c0da9457716df82a268330f" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.054013 4763 scope.go:117] "RemoveContainer" containerID="dcc051956abdc610e17bef83d6aaf61ff0f8cab2599d0512d3bc72f70b0f9015" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.059627 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.060018 4763 generic.go:334] "Generic (PLEG): container finished" podID="4c5b406a-e0c3-4856-a55a-58d3403994cc" containerID="45165ad41c822d9525863f060df2a2eba8ae327f7248300d8f6bcc59b3801870" exitCode=0 Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.060178 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c44f58df-s6697" event={"ID":"4c5b406a-e0c3-4856-a55a-58d3403994cc","Type":"ContainerDied","Data":"45165ad41c822d9525863f060df2a2eba8ae327f7248300d8f6bcc59b3801870"} Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.060215 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c44f58df-s6697" event={"ID":"4c5b406a-e0c3-4856-a55a-58d3403994cc","Type":"ContainerDied","Data":"d9e65f223d375a09ad3bb7664d9b685d1c5c53dc1dd55ab3c70793692cc1496f"} Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.060228 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9e65f223d375a09ad3bb7664d9b685d1c5c53dc1dd55ab3c70793692cc1496f" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.082620 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.150934 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-nb\") pod \"4c5b406a-e0c3-4856-a55a-58d3403994cc\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.151244 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-svc\") pod \"4c5b406a-e0c3-4856-a55a-58d3403994cc\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.151276 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-combined-ca-bundle\") pod \"7731d4cb-7569-4783-842d-acef9e33cb50\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.151358 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-config\") pod \"4c5b406a-e0c3-4856-a55a-58d3403994cc\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.151403 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9tk7\" (UniqueName: \"kubernetes.io/projected/7731d4cb-7569-4783-842d-acef9e33cb50-kube-api-access-x9tk7\") pod \"7731d4cb-7569-4783-842d-acef9e33cb50\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.151432 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-config-data\") pod \"7731d4cb-7569-4783-842d-acef9e33cb50\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.151473 4763 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-sb\") pod \"4c5b406a-e0c3-4856-a55a-58d3403994cc\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.151511 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-custom-prometheus-ca\") pod \"7731d4cb-7569-4783-842d-acef9e33cb50\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.151573 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7731d4cb-7569-4783-842d-acef9e33cb50-logs\") pod \"7731d4cb-7569-4783-842d-acef9e33cb50\" (UID: \"7731d4cb-7569-4783-842d-acef9e33cb50\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.151673 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llw72\" (UniqueName: \"kubernetes.io/projected/4c5b406a-e0c3-4856-a55a-58d3403994cc-kube-api-access-llw72\") pod \"4c5b406a-e0c3-4856-a55a-58d3403994cc\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.151700 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-swift-storage-0\") pod \"4c5b406a-e0c3-4856-a55a-58d3403994cc\" (UID: \"4c5b406a-e0c3-4856-a55a-58d3403994cc\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.159035 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7731d4cb-7569-4783-842d-acef9e33cb50-logs" (OuterVolumeSpecName: "logs") pod "7731d4cb-7569-4783-842d-acef9e33cb50" (UID: "7731d4cb-7569-4783-842d-acef9e33cb50"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.171790 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c5b406a-e0c3-4856-a55a-58d3403994cc-kube-api-access-llw72" (OuterVolumeSpecName: "kube-api-access-llw72") pod "4c5b406a-e0c3-4856-a55a-58d3403994cc" (UID: "4c5b406a-e0c3-4856-a55a-58d3403994cc"). InnerVolumeSpecName "kube-api-access-llw72". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.186273 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7731d4cb-7569-4783-842d-acef9e33cb50-kube-api-access-x9tk7" (OuterVolumeSpecName: "kube-api-access-x9tk7") pod "7731d4cb-7569-4783-842d-acef9e33cb50" (UID: "7731d4cb-7569-4783-842d-acef9e33cb50"). InnerVolumeSpecName "kube-api-access-x9tk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.227474 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "7731d4cb-7569-4783-842d-acef9e33cb50" (UID: "7731d4cb-7569-4783-842d-acef9e33cb50"). InnerVolumeSpecName "custom-prometheus-ca". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.254823 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9tk7\" (UniqueName: \"kubernetes.io/projected/7731d4cb-7569-4783-842d-acef9e33cb50-kube-api-access-x9tk7\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.254863 4763 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.254878 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7731d4cb-7569-4783-842d-acef9e33cb50-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.254908 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llw72\" (UniqueName: \"kubernetes.io/projected/4c5b406a-e0c3-4856-a55a-58d3403994cc-kube-api-access-llw72\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.273016 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7731d4cb-7569-4783-842d-acef9e33cb50" (UID: "7731d4cb-7569-4783-842d-acef9e33cb50"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.273943 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4c5b406a-e0c3-4856-a55a-58d3403994cc" (UID: "4c5b406a-e0c3-4856-a55a-58d3403994cc"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.304386 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4c5b406a-e0c3-4856-a55a-58d3403994cc" (UID: "4c5b406a-e0c3-4856-a55a-58d3403994cc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.304497 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-config-data" (OuterVolumeSpecName: "config-data") pod "7731d4cb-7569-4783-842d-acef9e33cb50" (UID: "7731d4cb-7569-4783-842d-acef9e33cb50"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.316048 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-config" (OuterVolumeSpecName: "config") pod "4c5b406a-e0c3-4856-a55a-58d3403994cc" (UID: "4c5b406a-e0c3-4856-a55a-58d3403994cc"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.327162 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4c5b406a-e0c3-4856-a55a-58d3403994cc" (UID: "4c5b406a-e0c3-4856-a55a-58d3403994cc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.331636 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4c5b406a-e0c3-4856-a55a-58d3403994cc" (UID: "4c5b406a-e0c3-4856-a55a-58d3403994cc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.356425 4763 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.356465 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.356475 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.356489 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.356498 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.356506 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7731d4cb-7569-4783-842d-acef9e33cb50-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.356514 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c5b406a-e0c3-4856-a55a-58d3403994cc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.418330 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.205:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.418349 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.205:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 
08:32:22.530732 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.662325 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-scripts\") pod \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.662935 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-combined-ca-bundle\") pod \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.663157 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvtxf\" (UniqueName: \"kubernetes.io/projected/9dace35f-53fc-43b5-a8c7-d58ba87f496b-kube-api-access-dvtxf\") pod \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.663236 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-config-data\") pod \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\" (UID: \"9dace35f-53fc-43b5-a8c7-d58ba87f496b\") " Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.672320 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-scripts" (OuterVolumeSpecName: "scripts") pod "9dace35f-53fc-43b5-a8c7-d58ba87f496b" (UID: "9dace35f-53fc-43b5-a8c7-d58ba87f496b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.689485 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dace35f-53fc-43b5-a8c7-d58ba87f496b-kube-api-access-dvtxf" (OuterVolumeSpecName: "kube-api-access-dvtxf") pod "9dace35f-53fc-43b5-a8c7-d58ba87f496b" (UID: "9dace35f-53fc-43b5-a8c7-d58ba87f496b"). InnerVolumeSpecName "kube-api-access-dvtxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.697019 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9dace35f-53fc-43b5-a8c7-d58ba87f496b" (UID: "9dace35f-53fc-43b5-a8c7-d58ba87f496b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.697529 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-config-data" (OuterVolumeSpecName: "config-data") pod "9dace35f-53fc-43b5-a8c7-d58ba87f496b" (UID: "9dace35f-53fc-43b5-a8c7-d58ba87f496b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.765237 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvtxf\" (UniqueName: \"kubernetes.io/projected/9dace35f-53fc-43b5-a8c7-d58ba87f496b-kube-api-access-dvtxf\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.765283 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.765294 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:22 crc kubenswrapper[4763]: I1206 08:32:22.765304 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dace35f-53fc-43b5-a8c7-d58ba87f496b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.076511 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-b47q2" event={"ID":"9dace35f-53fc-43b5-a8c7-d58ba87f496b","Type":"ContainerDied","Data":"ea151a44e4f8c1b7cec05101444b9d110976f869e8ad1b9d54a5d1ce6b2fa561"} Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.076564 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea151a44e4f8c1b7cec05101444b9d110976f869e8ad1b9d54a5d1ce6b2fa561" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.076629 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-b47q2" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.084410 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c44f58df-s6697" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.093595 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.148384 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.167970 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.186570 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 06 08:32:23 crc kubenswrapper[4763]: E1206 08:32:23.187081 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.187105 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:23 crc kubenswrapper[4763]: E1206 08:32:23.187122 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.187129 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:23 crc kubenswrapper[4763]: E1206 08:32:23.187138 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c5b406a-e0c3-4856-a55a-58d3403994cc" containerName="init" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.187147 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c5b406a-e0c3-4856-a55a-58d3403994cc" containerName="init" Dec 06 08:32:23 crc kubenswrapper[4763]: E1206 08:32:23.187165 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c5b406a-e0c3-4856-a55a-58d3403994cc" containerName="dnsmasq-dns" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.187174 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c5b406a-e0c3-4856-a55a-58d3403994cc" containerName="dnsmasq-dns" Dec 06 08:32:23 crc kubenswrapper[4763]: E1206 08:32:23.187189 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dace35f-53fc-43b5-a8c7-d58ba87f496b" containerName="nova-manage" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.187197 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dace35f-53fc-43b5-a8c7-d58ba87f496b" containerName="nova-manage" Dec 06 08:32:23 crc kubenswrapper[4763]: E1206 08:32:23.187222 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.187230 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.187475 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.187491 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.187507 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" 
containerName="watcher-decision-engine" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.187522 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c5b406a-e0c3-4856-a55a-58d3403994cc" containerName="dnsmasq-dns" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.187546 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dace35f-53fc-43b5-a8c7-d58ba87f496b" containerName="nova-manage" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.188462 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.196643 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.206918 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84c44f58df-s6697"] Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.221547 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84c44f58df-s6697"] Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.229784 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.276084 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2180bb1-fd83-4725-be6e-be8c0a976e5b-logs\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.276134 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2180bb1-fd83-4725-be6e-be8c0a976e5b-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.276231 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv7cq\" (UniqueName: \"kubernetes.io/projected/b2180bb1-fd83-4725-be6e-be8c0a976e5b-kube-api-access-lv7cq\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.276273 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b2180bb1-fd83-4725-be6e-be8c0a976e5b-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.276333 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2180bb1-fd83-4725-be6e-be8c0a976e5b-config-data\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.285757 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.286059 4763 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/nova-api-0" podUID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerName="nova-api-log" containerID="cri-o://5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850" gracePeriod=30 Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.286773 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerName="nova-api-api" containerID="cri-o://d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d" gracePeriod=30 Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.300249 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.300463 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="deb0ba5a-81b5-4dad-9a22-dc38cf94738c" containerName="nova-scheduler-scheduler" containerID="cri-o://07b86cc8bbbf8c47d83bda2a73aca0677bc9ce8ee67ad8c614a6d12eec381c1a" gracePeriod=30 Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.318448 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.318661 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6cf24aa4-2a38-4540-96ba-795bca5a17da" containerName="nova-metadata-log" containerID="cri-o://8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801" gracePeriod=30 Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.318735 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6cf24aa4-2a38-4540-96ba-795bca5a17da" containerName="nova-metadata-metadata" containerID="cri-o://4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3" gracePeriod=30 Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.377668 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2180bb1-fd83-4725-be6e-be8c0a976e5b-logs\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.377718 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2180bb1-fd83-4725-be6e-be8c0a976e5b-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.377815 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv7cq\" (UniqueName: \"kubernetes.io/projected/b2180bb1-fd83-4725-be6e-be8c0a976e5b-kube-api-access-lv7cq\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.377846 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b2180bb1-fd83-4725-be6e-be8c0a976e5b-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.377887 
4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2180bb1-fd83-4725-be6e-be8c0a976e5b-config-data\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.378189 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2180bb1-fd83-4725-be6e-be8c0a976e5b-logs\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.381544 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b2180bb1-fd83-4725-be6e-be8c0a976e5b-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.383511 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2180bb1-fd83-4725-be6e-be8c0a976e5b-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.386469 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2180bb1-fd83-4725-be6e-be8c0a976e5b-config-data\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.396306 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv7cq\" (UniqueName: \"kubernetes.io/projected/b2180bb1-fd83-4725-be6e-be8c0a976e5b-kube-api-access-lv7cq\") pod \"watcher-decision-engine-0\" (UID: \"b2180bb1-fd83-4725-be6e-be8c0a976e5b\") " pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.402851 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.403084 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.570663 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.745218 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c5b406a-e0c3-4856-a55a-58d3403994cc" path="/var/lib/kubelet/pods/4c5b406a-e0c3-4856-a55a-58d3403994cc/volumes" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.747265 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" path="/var/lib/kubelet/pods/7731d4cb-7569-4783-842d-acef9e33cb50/volumes" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.877831 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.990018 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-combined-ca-bundle\") pod \"6cf24aa4-2a38-4540-96ba-795bca5a17da\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.990091 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cf24aa4-2a38-4540-96ba-795bca5a17da-logs\") pod \"6cf24aa4-2a38-4540-96ba-795bca5a17da\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.990136 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-config-data\") pod \"6cf24aa4-2a38-4540-96ba-795bca5a17da\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.990242 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-nova-metadata-tls-certs\") pod \"6cf24aa4-2a38-4540-96ba-795bca5a17da\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.990311 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rsb7n\" (UniqueName: \"kubernetes.io/projected/6cf24aa4-2a38-4540-96ba-795bca5a17da-kube-api-access-rsb7n\") pod \"6cf24aa4-2a38-4540-96ba-795bca5a17da\" (UID: \"6cf24aa4-2a38-4540-96ba-795bca5a17da\") " Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.991533 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cf24aa4-2a38-4540-96ba-795bca5a17da-logs" (OuterVolumeSpecName: "logs") pod "6cf24aa4-2a38-4540-96ba-795bca5a17da" (UID: "6cf24aa4-2a38-4540-96ba-795bca5a17da"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:32:23 crc kubenswrapper[4763]: I1206 08:32:23.996753 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cf24aa4-2a38-4540-96ba-795bca5a17da-kube-api-access-rsb7n" (OuterVolumeSpecName: "kube-api-access-rsb7n") pod "6cf24aa4-2a38-4540-96ba-795bca5a17da" (UID: "6cf24aa4-2a38-4540-96ba-795bca5a17da"). InnerVolumeSpecName "kube-api-access-rsb7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.029474 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-config-data" (OuterVolumeSpecName: "config-data") pod "6cf24aa4-2a38-4540-96ba-795bca5a17da" (UID: "6cf24aa4-2a38-4540-96ba-795bca5a17da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.035766 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6cf24aa4-2a38-4540-96ba-795bca5a17da" (UID: "6cf24aa4-2a38-4540-96ba-795bca5a17da"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.051088 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6cf24aa4-2a38-4540-96ba-795bca5a17da" (UID: "6cf24aa4-2a38-4540-96ba-795bca5a17da"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.092160 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.092196 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cf24aa4-2a38-4540-96ba-795bca5a17da-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.092207 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.092243 4763 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cf24aa4-2a38-4540-96ba-795bca5a17da-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.092256 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rsb7n\" (UniqueName: \"kubernetes.io/projected/6cf24aa4-2a38-4540-96ba-795bca5a17da-kube-api-access-rsb7n\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.109142 4763 generic.go:334] "Generic (PLEG): container finished" podID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerID="5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850" exitCode=143 Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.109199 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e1253aae-34a4-49c8-9beb-e94fc89c4322","Type":"ContainerDied","Data":"5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850"} Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.114884 4763 generic.go:334] "Generic (PLEG): container finished" podID="6cf24aa4-2a38-4540-96ba-795bca5a17da" containerID="4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3" exitCode=0 Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.114931 4763 generic.go:334] "Generic (PLEG): container finished" podID="6cf24aa4-2a38-4540-96ba-795bca5a17da" containerID="8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801" exitCode=143 Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.114951 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.114955 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6cf24aa4-2a38-4540-96ba-795bca5a17da","Type":"ContainerDied","Data":"4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3"} Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.114984 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6cf24aa4-2a38-4540-96ba-795bca5a17da","Type":"ContainerDied","Data":"8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801"} Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.114997 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6cf24aa4-2a38-4540-96ba-795bca5a17da","Type":"ContainerDied","Data":"7df8c9b140856280d0716724bd3b1b72801caef32e7245f88913ec9303b09692"} Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.115015 4763 scope.go:117] "RemoveContainer" containerID="4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.126935 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.148632 4763 scope.go:117] "RemoveContainer" containerID="8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.169502 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.178424 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.190970 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:24 crc kubenswrapper[4763]: E1206 08:32:24.191415 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cf24aa4-2a38-4540-96ba-795bca5a17da" containerName="nova-metadata-log" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.191426 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cf24aa4-2a38-4540-96ba-795bca5a17da" containerName="nova-metadata-log" Dec 06 08:32:24 crc kubenswrapper[4763]: E1206 08:32:24.191442 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.191448 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:24 crc kubenswrapper[4763]: E1206 08:32:24.191459 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.191467 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:24 crc kubenswrapper[4763]: E1206 08:32:24.191501 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cf24aa4-2a38-4540-96ba-795bca5a17da" containerName="nova-metadata-metadata" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.191511 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cf24aa4-2a38-4540-96ba-795bca5a17da" 
containerName="nova-metadata-metadata" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.191749 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cf24aa4-2a38-4540-96ba-795bca5a17da" containerName="nova-metadata-log" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.191761 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.191772 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7731d4cb-7569-4783-842d-acef9e33cb50" containerName="watcher-decision-engine" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.191787 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cf24aa4-2a38-4540-96ba-795bca5a17da" containerName="nova-metadata-metadata" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.193295 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.196241 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.197397 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.217036 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.223633 4763 scope.go:117] "RemoveContainer" containerID="4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3" Dec 06 08:32:24 crc kubenswrapper[4763]: E1206 08:32:24.224066 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3\": container with ID starting with 4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3 not found: ID does not exist" containerID="4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.224145 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3"} err="failed to get container status \"4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3\": rpc error: code = NotFound desc = could not find container \"4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3\": container with ID starting with 4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3 not found: ID does not exist" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.224195 4763 scope.go:117] "RemoveContainer" containerID="8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801" Dec 06 08:32:24 crc kubenswrapper[4763]: E1206 08:32:24.230602 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801\": container with ID starting with 8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801 not found: ID does not exist" containerID="8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.230707 4763 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801"} err="failed to get container status \"8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801\": rpc error: code = NotFound desc = could not find container \"8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801\": container with ID starting with 8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801 not found: ID does not exist" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.230826 4763 scope.go:117] "RemoveContainer" containerID="4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.231358 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3"} err="failed to get container status \"4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3\": rpc error: code = NotFound desc = could not find container \"4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3\": container with ID starting with 4db39f64f865046577d0d31793bc67cac131aab6c54b62feebdd32a601a762d3 not found: ID does not exist" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.231402 4763 scope.go:117] "RemoveContainer" containerID="8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.231943 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801"} err="failed to get container status \"8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801\": rpc error: code = NotFound desc = could not find container \"8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801\": container with ID starting with 8df5c54d87197a3fa6d2d48ff4bb70c3dc698e8430c61714d770046ed0bdb801 not found: ID does not exist" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.299973 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.300296 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-config-data\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.300370 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.300406 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c175d78-39a6-4a2c-a6a2-f702d982cf08-logs\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc 
kubenswrapper[4763]: I1206 08:32:24.300431 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrkdj\" (UniqueName: \"kubernetes.io/projected/7c175d78-39a6-4a2c-a6a2-f702d982cf08-kube-api-access-nrkdj\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.402206 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-config-data\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.402353 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.402408 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c175d78-39a6-4a2c-a6a2-f702d982cf08-logs\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.402445 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrkdj\" (UniqueName: \"kubernetes.io/projected/7c175d78-39a6-4a2c-a6a2-f702d982cf08-kube-api-access-nrkdj\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.402553 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.403452 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c175d78-39a6-4a2c-a6a2-f702d982cf08-logs\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.406528 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.407192 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-config-data\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.407248 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: 
\"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.421781 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrkdj\" (UniqueName: \"kubernetes.io/projected/7c175d78-39a6-4a2c-a6a2-f702d982cf08-kube-api-access-nrkdj\") pod \"nova-metadata-0\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.517845 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:32:24 crc kubenswrapper[4763]: W1206 08:32:24.980964 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c175d78_39a6_4a2c_a6a2_f702d982cf08.slice/crio-b3b8cd58c42bf4b54ccbf78451265b09ad2c9246092471188944e2163566568d WatchSource:0}: Error finding container b3b8cd58c42bf4b54ccbf78451265b09ad2c9246092471188944e2163566568d: Status 404 returned error can't find the container with id b3b8cd58c42bf4b54ccbf78451265b09ad2c9246092471188944e2163566568d Dec 06 08:32:24 crc kubenswrapper[4763]: I1206 08:32:24.983498 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:32:25 crc kubenswrapper[4763]: I1206 08:32:25.127158 4763 generic.go:334] "Generic (PLEG): container finished" podID="40742493-d161-4853-89e0-f2841a3ea6d7" containerID="a511531cbcddccb6b2e61346bfd21c4943905b42915613641facfe34c4bb6ae1" exitCode=0 Dec 06 08:32:25 crc kubenswrapper[4763]: I1206 08:32:25.127207 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-j9gvv" event={"ID":"40742493-d161-4853-89e0-f2841a3ea6d7","Type":"ContainerDied","Data":"a511531cbcddccb6b2e61346bfd21c4943905b42915613641facfe34c4bb6ae1"} Dec 06 08:32:25 crc kubenswrapper[4763]: I1206 08:32:25.129925 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"b2180bb1-fd83-4725-be6e-be8c0a976e5b","Type":"ContainerStarted","Data":"1bdacbe2e470b290d5ced8535ab465caa2bb8d6a6b420184e967455416221384"} Dec 06 08:32:25 crc kubenswrapper[4763]: I1206 08:32:25.129948 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"b2180bb1-fd83-4725-be6e-be8c0a976e5b","Type":"ContainerStarted","Data":"e7e4018aa6eaf6d466ff1c22bde677ad5b3920160d30997af5dafb7012fc301d"} Dec 06 08:32:25 crc kubenswrapper[4763]: I1206 08:32:25.131976 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7c175d78-39a6-4a2c-a6a2-f702d982cf08","Type":"ContainerStarted","Data":"b3b8cd58c42bf4b54ccbf78451265b09ad2c9246092471188944e2163566568d"} Dec 06 08:32:25 crc kubenswrapper[4763]: I1206 08:32:25.164191 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=2.164172397 podStartE2EDuration="2.164172397s" podCreationTimestamp="2025-12-06 08:32:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:25.158133753 +0000 UTC m=+1227.733838791" watchObservedRunningTime="2025-12-06 08:32:25.164172397 +0000 UTC m=+1227.739877435" Dec 06 08:32:25 crc kubenswrapper[4763]: I1206 08:32:25.737765 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cf24aa4-2a38-4540-96ba-795bca5a17da" 
path="/var/lib/kubelet/pods/6cf24aa4-2a38-4540-96ba-795bca5a17da/volumes" Dec 06 08:32:25 crc kubenswrapper[4763]: E1206 08:32:25.938864 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="07b86cc8bbbf8c47d83bda2a73aca0677bc9ce8ee67ad8c614a6d12eec381c1a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 06 08:32:25 crc kubenswrapper[4763]: E1206 08:32:25.940558 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="07b86cc8bbbf8c47d83bda2a73aca0677bc9ce8ee67ad8c614a6d12eec381c1a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 06 08:32:25 crc kubenswrapper[4763]: E1206 08:32:25.942142 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="07b86cc8bbbf8c47d83bda2a73aca0677bc9ce8ee67ad8c614a6d12eec381c1a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 06 08:32:25 crc kubenswrapper[4763]: E1206 08:32:25.942185 4763 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="deb0ba5a-81b5-4dad-9a22-dc38cf94738c" containerName="nova-scheduler-scheduler" Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.152770 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7c175d78-39a6-4a2c-a6a2-f702d982cf08","Type":"ContainerStarted","Data":"5bcfcd5834b3a13a164ccf04fe2a086ca69c66f987aeef361d2445fcfb708ea0"} Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.152808 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7c175d78-39a6-4a2c-a6a2-f702d982cf08","Type":"ContainerStarted","Data":"4b106e7e443b52ddc623381fc4726fba765334377aca316017e5a5caf2620225"} Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.200388 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.200364054 podStartE2EDuration="2.200364054s" podCreationTimestamp="2025-12-06 08:32:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:26.179315437 +0000 UTC m=+1228.755020475" watchObservedRunningTime="2025-12-06 08:32:26.200364054 +0000 UTC m=+1228.776069102" Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.736266 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.846059 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-combined-ca-bundle\") pod \"40742493-d161-4853-89e0-f2841a3ea6d7\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.846265 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-scripts\") pod \"40742493-d161-4853-89e0-f2841a3ea6d7\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.846366 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bncl\" (UniqueName: \"kubernetes.io/projected/40742493-d161-4853-89e0-f2841a3ea6d7-kube-api-access-2bncl\") pod \"40742493-d161-4853-89e0-f2841a3ea6d7\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.846446 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-config-data\") pod \"40742493-d161-4853-89e0-f2841a3ea6d7\" (UID: \"40742493-d161-4853-89e0-f2841a3ea6d7\") " Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.852045 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-scripts" (OuterVolumeSpecName: "scripts") pod "40742493-d161-4853-89e0-f2841a3ea6d7" (UID: "40742493-d161-4853-89e0-f2841a3ea6d7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.853160 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40742493-d161-4853-89e0-f2841a3ea6d7-kube-api-access-2bncl" (OuterVolumeSpecName: "kube-api-access-2bncl") pod "40742493-d161-4853-89e0-f2841a3ea6d7" (UID: "40742493-d161-4853-89e0-f2841a3ea6d7"). InnerVolumeSpecName "kube-api-access-2bncl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.878332 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-config-data" (OuterVolumeSpecName: "config-data") pod "40742493-d161-4853-89e0-f2841a3ea6d7" (UID: "40742493-d161-4853-89e0-f2841a3ea6d7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.880635 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40742493-d161-4853-89e0-f2841a3ea6d7" (UID: "40742493-d161-4853-89e0-f2841a3ea6d7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.949266 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.949299 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bncl\" (UniqueName: \"kubernetes.io/projected/40742493-d161-4853-89e0-f2841a3ea6d7-kube-api-access-2bncl\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.949309 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:26 crc kubenswrapper[4763]: I1206 08:32:26.949317 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40742493-d161-4853-89e0-f2841a3ea6d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.017847 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.167219 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-j9gvv" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.172297 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-j9gvv" event={"ID":"40742493-d161-4853-89e0-f2841a3ea6d7","Type":"ContainerDied","Data":"71746815d2277f36e12e35964ba609e6d8e92c6c2083fbcc299ef173faf590aa"} Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.172360 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71746815d2277f36e12e35964ba609e6d8e92c6c2083fbcc299ef173faf590aa" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.251177 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 06 08:32:27 crc kubenswrapper[4763]: E1206 08:32:27.251826 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40742493-d161-4853-89e0-f2841a3ea6d7" containerName="nova-cell1-conductor-db-sync" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.251889 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="40742493-d161-4853-89e0-f2841a3ea6d7" containerName="nova-cell1-conductor-db-sync" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.252306 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="40742493-d161-4853-89e0-f2841a3ea6d7" containerName="nova-cell1-conductor-db-sync" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.257210 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.259552 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.269580 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.359789 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69005ae1-9200-4160-8026-7f672b8c30cc-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"69005ae1-9200-4160-8026-7f672b8c30cc\") " pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.359918 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69005ae1-9200-4160-8026-7f672b8c30cc-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"69005ae1-9200-4160-8026-7f672b8c30cc\") " pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.359951 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjtqj\" (UniqueName: \"kubernetes.io/projected/69005ae1-9200-4160-8026-7f672b8c30cc-kube-api-access-mjtqj\") pod \"nova-cell1-conductor-0\" (UID: \"69005ae1-9200-4160-8026-7f672b8c30cc\") " pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.461841 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69005ae1-9200-4160-8026-7f672b8c30cc-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"69005ae1-9200-4160-8026-7f672b8c30cc\") " pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.461916 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjtqj\" (UniqueName: \"kubernetes.io/projected/69005ae1-9200-4160-8026-7f672b8c30cc-kube-api-access-mjtqj\") pod \"nova-cell1-conductor-0\" (UID: \"69005ae1-9200-4160-8026-7f672b8c30cc\") " pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.461999 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69005ae1-9200-4160-8026-7f672b8c30cc-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"69005ae1-9200-4160-8026-7f672b8c30cc\") " pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.467992 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69005ae1-9200-4160-8026-7f672b8c30cc-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"69005ae1-9200-4160-8026-7f672b8c30cc\") " pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.474384 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69005ae1-9200-4160-8026-7f672b8c30cc-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"69005ae1-9200-4160-8026-7f672b8c30cc\") " pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.486579 4763 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjtqj\" (UniqueName: \"kubernetes.io/projected/69005ae1-9200-4160-8026-7f672b8c30cc-kube-api-access-mjtqj\") pod \"nova-cell1-conductor-0\" (UID: \"69005ae1-9200-4160-8026-7f672b8c30cc\") " pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.541614 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.611229 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.665535 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mr9x\" (UniqueName: \"kubernetes.io/projected/e1253aae-34a4-49c8-9beb-e94fc89c4322-kube-api-access-5mr9x\") pod \"e1253aae-34a4-49c8-9beb-e94fc89c4322\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.665618 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-combined-ca-bundle\") pod \"e1253aae-34a4-49c8-9beb-e94fc89c4322\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.665652 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-config-data\") pod \"e1253aae-34a4-49c8-9beb-e94fc89c4322\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.665852 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1253aae-34a4-49c8-9beb-e94fc89c4322-logs\") pod \"e1253aae-34a4-49c8-9beb-e94fc89c4322\" (UID: \"e1253aae-34a4-49c8-9beb-e94fc89c4322\") " Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.666711 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1253aae-34a4-49c8-9beb-e94fc89c4322-logs" (OuterVolumeSpecName: "logs") pod "e1253aae-34a4-49c8-9beb-e94fc89c4322" (UID: "e1253aae-34a4-49c8-9beb-e94fc89c4322"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.672911 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1253aae-34a4-49c8-9beb-e94fc89c4322-kube-api-access-5mr9x" (OuterVolumeSpecName: "kube-api-access-5mr9x") pod "e1253aae-34a4-49c8-9beb-e94fc89c4322" (UID: "e1253aae-34a4-49c8-9beb-e94fc89c4322"). InnerVolumeSpecName "kube-api-access-5mr9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.699263 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-config-data" (OuterVolumeSpecName: "config-data") pod "e1253aae-34a4-49c8-9beb-e94fc89c4322" (UID: "e1253aae-34a4-49c8-9beb-e94fc89c4322"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.707697 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1253aae-34a4-49c8-9beb-e94fc89c4322" (UID: "e1253aae-34a4-49c8-9beb-e94fc89c4322"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.774787 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1253aae-34a4-49c8-9beb-e94fc89c4322-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.774810 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mr9x\" (UniqueName: \"kubernetes.io/projected/e1253aae-34a4-49c8-9beb-e94fc89c4322-kube-api-access-5mr9x\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.774844 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.774853 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1253aae-34a4-49c8-9beb-e94fc89c4322-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:27 crc kubenswrapper[4763]: I1206 08:32:27.971072 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.181556 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"69005ae1-9200-4160-8026-7f672b8c30cc","Type":"ContainerStarted","Data":"febd750929aaf78e72ef7af309b1d0f1db153fbda8e6b63ce46bb35b548191a7"} Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.181638 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"69005ae1-9200-4160-8026-7f672b8c30cc","Type":"ContainerStarted","Data":"cc78f2a1c1d15f2d13e69b55a8651bb85c56dc9898f6fa80fac348b3ed4aaa11"} Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.181688 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.183540 4763 generic.go:334] "Generic (PLEG): container finished" podID="deb0ba5a-81b5-4dad-9a22-dc38cf94738c" containerID="07b86cc8bbbf8c47d83bda2a73aca0677bc9ce8ee67ad8c614a6d12eec381c1a" exitCode=0 Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.183607 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"deb0ba5a-81b5-4dad-9a22-dc38cf94738c","Type":"ContainerDied","Data":"07b86cc8bbbf8c47d83bda2a73aca0677bc9ce8ee67ad8c614a6d12eec381c1a"} Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.193485 4763 generic.go:334] "Generic (PLEG): container finished" podID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerID="d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d" exitCode=0 Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.193782 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"e1253aae-34a4-49c8-9beb-e94fc89c4322","Type":"ContainerDied","Data":"d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d"} Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.194444 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e1253aae-34a4-49c8-9beb-e94fc89c4322","Type":"ContainerDied","Data":"a9e6b191b06f79a074eb2538eb0c7f09705b9b31548bdffa19eaf68bae1f52da"} Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.194492 4763 scope.go:117] "RemoveContainer" containerID="d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.194618 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.194861 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.200892 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.200868271 podStartE2EDuration="1.200868271s" podCreationTimestamp="2025-12-06 08:32:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:28.198362824 +0000 UTC m=+1230.774067862" watchObservedRunningTime="2025-12-06 08:32:28.200868271 +0000 UTC m=+1230.776573309" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.264144 4763 scope.go:117] "RemoveContainer" containerID="5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.280887 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.286645 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rznpr\" (UniqueName: \"kubernetes.io/projected/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-kube-api-access-rznpr\") pod \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.286714 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-combined-ca-bundle\") pod \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.287037 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-config-data\") pod \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\" (UID: \"deb0ba5a-81b5-4dad-9a22-dc38cf94738c\") " Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.315828 4763 scope.go:117] "RemoveContainer" containerID="d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d" Dec 06 08:32:28 crc kubenswrapper[4763]: E1206 08:32:28.316383 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d\": container with ID starting with d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d not found: ID does not exist" 
containerID="d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.316415 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d"} err="failed to get container status \"d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d\": rpc error: code = NotFound desc = could not find container \"d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d\": container with ID starting with d83abeb2a9553cb9299cd4a23deff3adbe73dc1f2268f80139ea3a82460e763d not found: ID does not exist" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.316437 4763 scope.go:117] "RemoveContainer" containerID="5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850" Dec 06 08:32:28 crc kubenswrapper[4763]: E1206 08:32:28.316908 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850\": container with ID starting with 5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850 not found: ID does not exist" containerID="5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.316931 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850"} err="failed to get container status \"5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850\": rpc error: code = NotFound desc = could not find container \"5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850\": container with ID starting with 5dd01b74987af4cc22e9e53f6f1d07b84f90176790a072c05866363334606850 not found: ID does not exist" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.318117 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.330148 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-kube-api-access-rznpr" (OuterVolumeSpecName: "kube-api-access-rznpr") pod "deb0ba5a-81b5-4dad-9a22-dc38cf94738c" (UID: "deb0ba5a-81b5-4dad-9a22-dc38cf94738c"). InnerVolumeSpecName "kube-api-access-rznpr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.336126 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:28 crc kubenswrapper[4763]: E1206 08:32:28.336632 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerName="nova-api-api" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.336658 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerName="nova-api-api" Dec 06 08:32:28 crc kubenswrapper[4763]: E1206 08:32:28.336694 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deb0ba5a-81b5-4dad-9a22-dc38cf94738c" containerName="nova-scheduler-scheduler" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.336704 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="deb0ba5a-81b5-4dad-9a22-dc38cf94738c" containerName="nova-scheduler-scheduler" Dec 06 08:32:28 crc kubenswrapper[4763]: E1206 08:32:28.336743 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerName="nova-api-log" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.336751 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerName="nova-api-log" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.337002 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="deb0ba5a-81b5-4dad-9a22-dc38cf94738c" containerName="nova-scheduler-scheduler" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.337046 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerName="nova-api-api" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.337062 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1253aae-34a4-49c8-9beb-e94fc89c4322" containerName="nova-api-log" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.340816 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-config-data" (OuterVolumeSpecName: "config-data") pod "deb0ba5a-81b5-4dad-9a22-dc38cf94738c" (UID: "deb0ba5a-81b5-4dad-9a22-dc38cf94738c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.341411 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.343568 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.344519 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "deb0ba5a-81b5-4dad-9a22-dc38cf94738c" (UID: "deb0ba5a-81b5-4dad-9a22-dc38cf94738c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.350081 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.392064 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-config-data\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.392238 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7526e3cd-76e3-4c19-8431-2d35553efd25-logs\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.392269 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bwb8\" (UniqueName: \"kubernetes.io/projected/7526e3cd-76e3-4c19-8431-2d35553efd25-kube-api-access-4bwb8\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.392346 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.392722 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rznpr\" (UniqueName: \"kubernetes.io/projected/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-kube-api-access-rznpr\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.392765 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.392778 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deb0ba5a-81b5-4dad-9a22-dc38cf94738c-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.493812 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.493945 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-config-data\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.494003 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7526e3cd-76e3-4c19-8431-2d35553efd25-logs\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc 
kubenswrapper[4763]: I1206 08:32:28.494022 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bwb8\" (UniqueName: \"kubernetes.io/projected/7526e3cd-76e3-4c19-8431-2d35553efd25-kube-api-access-4bwb8\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.494378 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7526e3cd-76e3-4c19-8431-2d35553efd25-logs\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.500419 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-config-data\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.502952 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.513665 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bwb8\" (UniqueName: \"kubernetes.io/projected/7526e3cd-76e3-4c19-8431-2d35553efd25-kube-api-access-4bwb8\") pod \"nova-api-0\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " pod="openstack/nova-api-0" Dec 06 08:32:28 crc kubenswrapper[4763]: I1206 08:32:28.666475 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.237965 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.238715 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"deb0ba5a-81b5-4dad-9a22-dc38cf94738c","Type":"ContainerDied","Data":"a3658cfb06ec58f609b1b0c94b6310a376286be69762c1a5258960e82efb25ec"} Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.238754 4763 scope.go:117] "RemoveContainer" containerID="07b86cc8bbbf8c47d83bda2a73aca0677bc9ce8ee67ad8c614a6d12eec381c1a" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.241880 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:29 crc kubenswrapper[4763]: W1206 08:32:29.268854 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7526e3cd_76e3_4c19_8431_2d35553efd25.slice/crio-8c12f94a7c3825723377f22dbe03ecdebee6dd6af157fbcff72355c1947eae33 WatchSource:0}: Error finding container 8c12f94a7c3825723377f22dbe03ecdebee6dd6af157fbcff72355c1947eae33: Status 404 returned error can't find the container with id 8c12f94a7c3825723377f22dbe03ecdebee6dd6af157fbcff72355c1947eae33 Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.315372 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.337314 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.352189 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.353622 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.357343 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.365199 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.420398 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.420520 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-config-data\") pod \"nova-scheduler-0\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.420568 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5vvg\" (UniqueName: \"kubernetes.io/projected/036fe43a-e52b-4022-8a64-754e30e9c470-kube-api-access-p5vvg\") pod \"nova-scheduler-0\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.518555 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.518795 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.522673 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.522807 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-config-data\") pod \"nova-scheduler-0\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.522864 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5vvg\" (UniqueName: \"kubernetes.io/projected/036fe43a-e52b-4022-8a64-754e30e9c470-kube-api-access-p5vvg\") pod \"nova-scheduler-0\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.526859 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-config-data\") pod \"nova-scheduler-0\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.527803 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.540051 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5vvg\" (UniqueName: \"kubernetes.io/projected/036fe43a-e52b-4022-8a64-754e30e9c470-kube-api-access-p5vvg\") pod \"nova-scheduler-0\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.712961 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.736788 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="deb0ba5a-81b5-4dad-9a22-dc38cf94738c" path="/var/lib/kubelet/pods/deb0ba5a-81b5-4dad-9a22-dc38cf94738c/volumes" Dec 06 08:32:29 crc kubenswrapper[4763]: I1206 08:32:29.737494 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1253aae-34a4-49c8-9beb-e94fc89c4322" path="/var/lib/kubelet/pods/e1253aae-34a4-49c8-9beb-e94fc89c4322/volumes" Dec 06 08:32:30 crc kubenswrapper[4763]: W1206 08:32:30.158982 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod036fe43a_e52b_4022_8a64_754e30e9c470.slice/crio-4c2d467a42c694ed9a2e7fb4f28d3a0a8d4027ef58a93908b3520c02e6bfeaf7 WatchSource:0}: Error finding container 4c2d467a42c694ed9a2e7fb4f28d3a0a8d4027ef58a93908b3520c02e6bfeaf7: Status 404 returned error can't find the container with id 4c2d467a42c694ed9a2e7fb4f28d3a0a8d4027ef58a93908b3520c02e6bfeaf7 Dec 06 08:32:30 crc kubenswrapper[4763]: I1206 08:32:30.164752 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:32:30 crc kubenswrapper[4763]: I1206 08:32:30.252103 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7526e3cd-76e3-4c19-8431-2d35553efd25","Type":"ContainerStarted","Data":"4d34f8bfdc78503c523060edb052dda91383637da49ba0acb0a50ca75ee585c7"} Dec 06 08:32:30 crc kubenswrapper[4763]: I1206 08:32:30.253811 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7526e3cd-76e3-4c19-8431-2d35553efd25","Type":"ContainerStarted","Data":"78712858d9dfc50a6fd5f18615f2e55e7c66346cae5ad9c66e9530c0954b333d"} Dec 06 08:32:30 crc kubenswrapper[4763]: I1206 08:32:30.253996 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7526e3cd-76e3-4c19-8431-2d35553efd25","Type":"ContainerStarted","Data":"8c12f94a7c3825723377f22dbe03ecdebee6dd6af157fbcff72355c1947eae33"} Dec 06 08:32:30 crc kubenswrapper[4763]: I1206 08:32:30.256407 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"036fe43a-e52b-4022-8a64-754e30e9c470","Type":"ContainerStarted","Data":"4c2d467a42c694ed9a2e7fb4f28d3a0a8d4027ef58a93908b3520c02e6bfeaf7"} Dec 06 08:32:31 crc kubenswrapper[4763]: I1206 08:32:31.267385 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"036fe43a-e52b-4022-8a64-754e30e9c470","Type":"ContainerStarted","Data":"5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22"} Dec 06 08:32:31 crc kubenswrapper[4763]: I1206 08:32:31.287156 4763 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/nova-api-0" podStartSLOduration=3.287117777 podStartE2EDuration="3.287117777s" podCreationTimestamp="2025-12-06 08:32:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:30.277206752 +0000 UTC m=+1232.852911790" watchObservedRunningTime="2025-12-06 08:32:31.287117777 +0000 UTC m=+1233.862822815" Dec 06 08:32:31 crc kubenswrapper[4763]: I1206 08:32:31.288803 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.288772682 podStartE2EDuration="2.288772682s" podCreationTimestamp="2025-12-06 08:32:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:31.285171894 +0000 UTC m=+1233.860876942" watchObservedRunningTime="2025-12-06 08:32:31.288772682 +0000 UTC m=+1233.864477730" Dec 06 08:32:31 crc kubenswrapper[4763]: I1206 08:32:31.911191 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 06 08:32:31 crc kubenswrapper[4763]: I1206 08:32:31.911644 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="4260e0fd-067b-4a47-8a55-0514868766aa" containerName="kube-state-metrics" containerID="cri-o://a933ad9151c407c00d004a6db317c197ecf74af4cfed750a526a0389a15fdb55" gracePeriod=30 Dec 06 08:32:32 crc kubenswrapper[4763]: I1206 08:32:32.278287 4763 generic.go:334] "Generic (PLEG): container finished" podID="4260e0fd-067b-4a47-8a55-0514868766aa" containerID="a933ad9151c407c00d004a6db317c197ecf74af4cfed750a526a0389a15fdb55" exitCode=2 Dec 06 08:32:32 crc kubenswrapper[4763]: I1206 08:32:32.278940 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4260e0fd-067b-4a47-8a55-0514868766aa","Type":"ContainerDied","Data":"a933ad9151c407c00d004a6db317c197ecf74af4cfed750a526a0389a15fdb55"} Dec 06 08:32:32 crc kubenswrapper[4763]: I1206 08:32:32.658227 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 06 08:32:32 crc kubenswrapper[4763]: I1206 08:32:32.785860 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vqk7\" (UniqueName: \"kubernetes.io/projected/4260e0fd-067b-4a47-8a55-0514868766aa-kube-api-access-8vqk7\") pod \"4260e0fd-067b-4a47-8a55-0514868766aa\" (UID: \"4260e0fd-067b-4a47-8a55-0514868766aa\") " Dec 06 08:32:32 crc kubenswrapper[4763]: I1206 08:32:32.792204 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4260e0fd-067b-4a47-8a55-0514868766aa-kube-api-access-8vqk7" (OuterVolumeSpecName: "kube-api-access-8vqk7") pod "4260e0fd-067b-4a47-8a55-0514868766aa" (UID: "4260e0fd-067b-4a47-8a55-0514868766aa"). InnerVolumeSpecName "kube-api-access-8vqk7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:32 crc kubenswrapper[4763]: I1206 08:32:32.888860 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vqk7\" (UniqueName: \"kubernetes.io/projected/4260e0fd-067b-4a47-8a55-0514868766aa-kube-api-access-8vqk7\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.288985 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4260e0fd-067b-4a47-8a55-0514868766aa","Type":"ContainerDied","Data":"e5d5e3b80da4aa51a09e513217fee6737b645fb9e206087957625b97dc5f0e86"} Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.289288 4763 scope.go:117] "RemoveContainer" containerID="a933ad9151c407c00d004a6db317c197ecf74af4cfed750a526a0389a15fdb55" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.289080 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.338036 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.366040 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.380631 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 06 08:32:33 crc kubenswrapper[4763]: E1206 08:32:33.381766 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4260e0fd-067b-4a47-8a55-0514868766aa" containerName="kube-state-metrics" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.381786 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="4260e0fd-067b-4a47-8a55-0514868766aa" containerName="kube-state-metrics" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.382800 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="4260e0fd-067b-4a47-8a55-0514868766aa" containerName="kube-state-metrics" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.390545 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.393468 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.396708 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.412522 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.513957 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.514078 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prcxw\" (UniqueName: \"kubernetes.io/projected/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-kube-api-access-prcxw\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.514107 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.514135 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.572668 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.604138 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.616320 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.616707 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prcxw\" (UniqueName: \"kubernetes.io/projected/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-kube-api-access-prcxw\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.616841 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.616969 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.625646 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.626303 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.627862 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.640466 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prcxw\" (UniqueName: \"kubernetes.io/projected/5102f1c3-3d66-40c5-88d3-a1e4b38cfadb-kube-api-access-prcxw\") pod \"kube-state-metrics-0\" (UID: \"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb\") " pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.719601 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 06 08:32:33 crc kubenswrapper[4763]: I1206 08:32:33.730030 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4260e0fd-067b-4a47-8a55-0514868766aa" path="/var/lib/kubelet/pods/4260e0fd-067b-4a47-8a55-0514868766aa/volumes" Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.214351 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.215244 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="ceilometer-central-agent" containerID="cri-o://6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e" gracePeriod=30 Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.215296 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="ceilometer-notification-agent" containerID="cri-o://fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3" gracePeriod=30 Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.215340 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="sg-core" containerID="cri-o://f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f" gracePeriod=30 Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.215283 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="proxy-httpd" containerID="cri-o://3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875" gracePeriod=30 Dec 06 08:32:34 crc kubenswrapper[4763]: W1206 08:32:34.252174 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5102f1c3_3d66_40c5_88d3_a1e4b38cfadb.slice/crio-a2e365cf18200ffc7514b9ae44b75483eb34dd10aed49f8058bc4995256160ef WatchSource:0}: Error finding container a2e365cf18200ffc7514b9ae44b75483eb34dd10aed49f8058bc4995256160ef: Status 404 returned error can't find the container with id a2e365cf18200ffc7514b9ae44b75483eb34dd10aed49f8058bc4995256160ef Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.252725 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.298643 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb","Type":"ContainerStarted","Data":"a2e365cf18200ffc7514b9ae44b75483eb34dd10aed49f8058bc4995256160ef"} Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.300164 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.333065 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.518117 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.518457 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-metadata-0" Dec 06 08:32:34 crc kubenswrapper[4763]: I1206 08:32:34.713502 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 06 08:32:35 crc kubenswrapper[4763]: I1206 08:32:35.378478 4763 generic.go:334] "Generic (PLEG): container finished" podID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerID="3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875" exitCode=0 Dec 06 08:32:35 crc kubenswrapper[4763]: I1206 08:32:35.378514 4763 generic.go:334] "Generic (PLEG): container finished" podID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerID="f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f" exitCode=2 Dec 06 08:32:35 crc kubenswrapper[4763]: I1206 08:32:35.378525 4763 generic.go:334] "Generic (PLEG): container finished" podID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerID="6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e" exitCode=0 Dec 06 08:32:35 crc kubenswrapper[4763]: I1206 08:32:35.378554 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2273ae06-63bf-4068-ad08-0b7860c4bde9","Type":"ContainerDied","Data":"3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875"} Dec 06 08:32:35 crc kubenswrapper[4763]: I1206 08:32:35.378595 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2273ae06-63bf-4068-ad08-0b7860c4bde9","Type":"ContainerDied","Data":"f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f"} Dec 06 08:32:35 crc kubenswrapper[4763]: I1206 08:32:35.378607 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2273ae06-63bf-4068-ad08-0b7860c4bde9","Type":"ContainerDied","Data":"6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e"} Dec 06 08:32:35 crc kubenswrapper[4763]: I1206 08:32:35.529050 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.210:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 06 08:32:35 crc kubenswrapper[4763]: I1206 08:32:35.529093 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.210:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 06 08:32:36 crc kubenswrapper[4763]: I1206 08:32:36.390029 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5102f1c3-3d66-40c5-88d3-a1e4b38cfadb","Type":"ContainerStarted","Data":"8c4fe3f6a1ad26ecc3621244612250923a31cf45e2532479a7931ef2bc4500f9"} Dec 06 08:32:36 crc kubenswrapper[4763]: I1206 08:32:36.390214 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 06 08:32:36 crc kubenswrapper[4763]: I1206 08:32:36.406302 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.525441569 podStartE2EDuration="3.406281405s" podCreationTimestamp="2025-12-06 08:32:33 +0000 UTC" firstStartedPulling="2025-12-06 08:32:34.254672418 +0000 UTC m=+1236.830377456" lastFinishedPulling="2025-12-06 08:32:35.135512254 +0000 UTC m=+1237.711217292" 
observedRunningTime="2025-12-06 08:32:36.404694043 +0000 UTC m=+1238.980399081" watchObservedRunningTime="2025-12-06 08:32:36.406281405 +0000 UTC m=+1238.981986443" Dec 06 08:32:37 crc kubenswrapper[4763]: I1206 08:32:37.652185 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 06 08:32:38 crc kubenswrapper[4763]: I1206 08:32:38.666772 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 06 08:32:38 crc kubenswrapper[4763]: I1206 08:32:38.667644 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 06 08:32:39 crc kubenswrapper[4763]: I1206 08:32:39.713281 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 06 08:32:39 crc kubenswrapper[4763]: I1206 08:32:39.746506 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 06 08:32:39 crc kubenswrapper[4763]: I1206 08:32:39.749041 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 06 08:32:39 crc kubenswrapper[4763]: I1206 08:32:39.749045 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.324959 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.405048 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-config-data\") pod \"2273ae06-63bf-4068-ad08-0b7860c4bde9\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.405396 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-log-httpd\") pod \"2273ae06-63bf-4068-ad08-0b7860c4bde9\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.405465 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vw2n\" (UniqueName: \"kubernetes.io/projected/2273ae06-63bf-4068-ad08-0b7860c4bde9-kube-api-access-6vw2n\") pod \"2273ae06-63bf-4068-ad08-0b7860c4bde9\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.405567 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-combined-ca-bundle\") pod \"2273ae06-63bf-4068-ad08-0b7860c4bde9\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.405591 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-sg-core-conf-yaml\") pod \"2273ae06-63bf-4068-ad08-0b7860c4bde9\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.405655 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-scripts\") pod \"2273ae06-63bf-4068-ad08-0b7860c4bde9\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.405687 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-run-httpd\") pod \"2273ae06-63bf-4068-ad08-0b7860c4bde9\" (UID: \"2273ae06-63bf-4068-ad08-0b7860c4bde9\") " Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.408826 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2273ae06-63bf-4068-ad08-0b7860c4bde9" (UID: "2273ae06-63bf-4068-ad08-0b7860c4bde9"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.409031 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2273ae06-63bf-4068-ad08-0b7860c4bde9" (UID: "2273ae06-63bf-4068-ad08-0b7860c4bde9"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.411247 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2273ae06-63bf-4068-ad08-0b7860c4bde9-kube-api-access-6vw2n" (OuterVolumeSpecName: "kube-api-access-6vw2n") pod "2273ae06-63bf-4068-ad08-0b7860c4bde9" (UID: "2273ae06-63bf-4068-ad08-0b7860c4bde9"). InnerVolumeSpecName "kube-api-access-6vw2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.448436 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-scripts" (OuterVolumeSpecName: "scripts") pod "2273ae06-63bf-4068-ad08-0b7860c4bde9" (UID: "2273ae06-63bf-4068-ad08-0b7860c4bde9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.460246 4763 generic.go:334] "Generic (PLEG): container finished" podID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerID="fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3" exitCode=0 Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.460399 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2273ae06-63bf-4068-ad08-0b7860c4bde9","Type":"ContainerDied","Data":"fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3"} Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.460427 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2273ae06-63bf-4068-ad08-0b7860c4bde9","Type":"ContainerDied","Data":"c749a27cac4d5cbcb228896104dab85049572edbe3e86cad7a5e7dbc47325155"} Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.460442 4763 scope.go:117] "RemoveContainer" containerID="3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.460720 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.465544 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2273ae06-63bf-4068-ad08-0b7860c4bde9" (UID: "2273ae06-63bf-4068-ad08-0b7860c4bde9"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.497823 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.514879 4763 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.514943 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.514958 4763 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.514970 4763 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2273ae06-63bf-4068-ad08-0b7860c4bde9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.514981 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vw2n\" (UniqueName: \"kubernetes.io/projected/2273ae06-63bf-4068-ad08-0b7860c4bde9-kube-api-access-6vw2n\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.519483 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2273ae06-63bf-4068-ad08-0b7860c4bde9" (UID: "2273ae06-63bf-4068-ad08-0b7860c4bde9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.539187 4763 scope.go:117] "RemoveContainer" containerID="f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.550765 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-config-data" (OuterVolumeSpecName: "config-data") pod "2273ae06-63bf-4068-ad08-0b7860c4bde9" (UID: "2273ae06-63bf-4068-ad08-0b7860c4bde9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.558860 4763 scope.go:117] "RemoveContainer" containerID="fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.582657 4763 scope.go:117] "RemoveContainer" containerID="6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.604589 4763 scope.go:117] "RemoveContainer" containerID="3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875" Dec 06 08:32:40 crc kubenswrapper[4763]: E1206 08:32:40.604966 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875\": container with ID starting with 3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875 not found: ID does not exist" containerID="3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.605002 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875"} err="failed to get container status \"3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875\": rpc error: code = NotFound desc = could not find container \"3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875\": container with ID starting with 3d0a5c58531d4f4833821794bfae87dd840a1df9d4f9b3bb9ee04c8a4fc2f875 not found: ID does not exist" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.605021 4763 scope.go:117] "RemoveContainer" containerID="f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f" Dec 06 08:32:40 crc kubenswrapper[4763]: E1206 08:32:40.605243 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f\": container with ID starting with f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f not found: ID does not exist" containerID="f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.605267 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f"} err="failed to get container status \"f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f\": rpc error: code = NotFound desc = could not find container \"f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f\": container with ID starting with f8492ae584dbc8a80b372a503dce56142da5d048fde6010551e4efd07aa0443f not found: ID does not exist" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.605283 4763 scope.go:117] "RemoveContainer" containerID="fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3" Dec 06 08:32:40 crc kubenswrapper[4763]: E1206 08:32:40.605829 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3\": container with ID starting with fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3 not found: ID does not exist" containerID="fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3" Dec 06 08:32:40 crc 
kubenswrapper[4763]: I1206 08:32:40.605851 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3"} err="failed to get container status \"fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3\": rpc error: code = NotFound desc = could not find container \"fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3\": container with ID starting with fd6b067b212427f27de026ee878dfa96e9c5f7d99b0074710fe987d8ac70adf3 not found: ID does not exist" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.605865 4763 scope.go:117] "RemoveContainer" containerID="6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e" Dec 06 08:32:40 crc kubenswrapper[4763]: E1206 08:32:40.606207 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e\": container with ID starting with 6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e not found: ID does not exist" containerID="6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.606226 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e"} err="failed to get container status \"6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e\": rpc error: code = NotFound desc = could not find container \"6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e\": container with ID starting with 6e6a6d7f1a3d3802e6092c4bddbf81d90b2ece823843c4faab98369f63b22c9e not found: ID does not exist" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.617240 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.617262 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2273ae06-63bf-4068-ad08-0b7860c4bde9-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.806642 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.819302 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.829942 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:40 crc kubenswrapper[4763]: E1206 08:32:40.830766 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="proxy-httpd" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.830784 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="proxy-httpd" Dec 06 08:32:40 crc kubenswrapper[4763]: E1206 08:32:40.830795 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="ceilometer-central-agent" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.830801 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" 
containerName="ceilometer-central-agent" Dec 06 08:32:40 crc kubenswrapper[4763]: E1206 08:32:40.830840 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="ceilometer-notification-agent" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.830846 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="ceilometer-notification-agent" Dec 06 08:32:40 crc kubenswrapper[4763]: E1206 08:32:40.830863 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="sg-core" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.830869 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="sg-core" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.831063 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="ceilometer-central-agent" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.831078 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="ceilometer-notification-agent" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.831097 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="proxy-httpd" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.831109 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" containerName="sg-core" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.833839 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.841529 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.841934 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.843193 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.861145 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.929079 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-config-data\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.929171 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.929197 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.929254 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm28v\" (UniqueName: \"kubernetes.io/projected/3f32364e-980f-467c-835a-4cd072176cb0-kube-api-access-dm28v\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.929287 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-log-httpd\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.929307 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.929334 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-scripts\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:40 crc kubenswrapper[4763]: I1206 08:32:40.929371 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-run-httpd\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.031421 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.031475 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.031537 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm28v\" (UniqueName: \"kubernetes.io/projected/3f32364e-980f-467c-835a-4cd072176cb0-kube-api-access-dm28v\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.031570 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-log-httpd\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.031592 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.031619 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-scripts\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.031655 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-run-httpd\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.031695 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-config-data\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.032954 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-log-httpd\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.035113 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.035388 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-run-httpd\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.035811 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-config-data\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.037447 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.039123 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.048766 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-scripts\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.053459 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm28v\" (UniqueName: \"kubernetes.io/projected/3f32364e-980f-467c-835a-4cd072176cb0-kube-api-access-dm28v\") pod \"ceilometer-0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.164165 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:32:41 crc kubenswrapper[4763]: W1206 08:32:41.672156 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f32364e_980f_467c_835a_4cd072176cb0.slice/crio-aba785997a2753b89b95be8e9a9290470da7ace77c8dd39d044f6ab9ebb0a19c WatchSource:0}: Error finding container aba785997a2753b89b95be8e9a9290470da7ace77c8dd39d044f6ab9ebb0a19c: Status 404 returned error can't find the container with id aba785997a2753b89b95be8e9a9290470da7ace77c8dd39d044f6ab9ebb0a19c Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.676750 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:41 crc kubenswrapper[4763]: I1206 08:32:41.731334 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2273ae06-63bf-4068-ad08-0b7860c4bde9" path="/var/lib/kubelet/pods/2273ae06-63bf-4068-ad08-0b7860c4bde9/volumes" Dec 06 08:32:42 crc kubenswrapper[4763]: I1206 08:32:42.482982 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f32364e-980f-467c-835a-4cd072176cb0","Type":"ContainerStarted","Data":"2d8662c742bc20d9ad64c1b9af85d0d5102acae432575aa41c3e5a00d287af62"} Dec 06 08:32:42 crc kubenswrapper[4763]: I1206 08:32:42.483531 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f32364e-980f-467c-835a-4cd072176cb0","Type":"ContainerStarted","Data":"820221b51ea1e461602886c5dd8f31a97627314c704eb323ff413fc48e7858ab"} Dec 06 08:32:42 crc kubenswrapper[4763]: I1206 08:32:42.483561 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f32364e-980f-467c-835a-4cd072176cb0","Type":"ContainerStarted","Data":"aba785997a2753b89b95be8e9a9290470da7ace77c8dd39d044f6ab9ebb0a19c"} Dec 06 08:32:43 crc kubenswrapper[4763]: I1206 08:32:43.495227 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f32364e-980f-467c-835a-4cd072176cb0","Type":"ContainerStarted","Data":"22bec48f316f01d16ca86d978b34ee86213c26e22f2d050665ecde25d93f82bf"} Dec 06 08:32:43 crc kubenswrapper[4763]: I1206 08:32:43.730542 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 06 08:32:44 crc kubenswrapper[4763]: I1206 08:32:44.508016 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f32364e-980f-467c-835a-4cd072176cb0","Type":"ContainerStarted","Data":"a9f6e2f74a8eef61a2bece0e2c49b2a6dc4e71dc982e2e231ca1db088a9cd4a8"} Dec 06 08:32:44 crc kubenswrapper[4763]: I1206 08:32:44.508654 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 06 08:32:44 crc kubenswrapper[4763]: I1206 08:32:44.524371 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 06 08:32:44 crc kubenswrapper[4763]: I1206 08:32:44.526367 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 06 08:32:44 crc kubenswrapper[4763]: I1206 08:32:44.534216 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.013164079 podStartE2EDuration="4.534198107s" podCreationTimestamp="2025-12-06 08:32:40 +0000 UTC" firstStartedPulling="2025-12-06 08:32:41.674562439 +0000 UTC m=+1244.250267477" 
lastFinishedPulling="2025-12-06 08:32:44.195596467 +0000 UTC m=+1246.771301505" observedRunningTime="2025-12-06 08:32:44.527800484 +0000 UTC m=+1247.103505522" watchObservedRunningTime="2025-12-06 08:32:44.534198107 +0000 UTC m=+1247.109903145" Dec 06 08:32:44 crc kubenswrapper[4763]: I1206 08:32:44.538605 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 06 08:32:45 crc kubenswrapper[4763]: I1206 08:32:45.524648 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.371423 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.449412 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-config-data\") pod \"379b90db-1a4a-49ca-90bc-57701808262d\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.449486 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-combined-ca-bundle\") pod \"379b90db-1a4a-49ca-90bc-57701808262d\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.449572 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f72lv\" (UniqueName: \"kubernetes.io/projected/379b90db-1a4a-49ca-90bc-57701808262d-kube-api-access-f72lv\") pod \"379b90db-1a4a-49ca-90bc-57701808262d\" (UID: \"379b90db-1a4a-49ca-90bc-57701808262d\") " Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.454932 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/379b90db-1a4a-49ca-90bc-57701808262d-kube-api-access-f72lv" (OuterVolumeSpecName: "kube-api-access-f72lv") pod "379b90db-1a4a-49ca-90bc-57701808262d" (UID: "379b90db-1a4a-49ca-90bc-57701808262d"). InnerVolumeSpecName "kube-api-access-f72lv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.481609 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "379b90db-1a4a-49ca-90bc-57701808262d" (UID: "379b90db-1a4a-49ca-90bc-57701808262d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.485037 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-config-data" (OuterVolumeSpecName: "config-data") pod "379b90db-1a4a-49ca-90bc-57701808262d" (UID: "379b90db-1a4a-49ca-90bc-57701808262d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.526723 4763 generic.go:334] "Generic (PLEG): container finished" podID="379b90db-1a4a-49ca-90bc-57701808262d" containerID="db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037" exitCode=137 Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.527779 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"379b90db-1a4a-49ca-90bc-57701808262d","Type":"ContainerDied","Data":"db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037"} Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.527799 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.528153 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"379b90db-1a4a-49ca-90bc-57701808262d","Type":"ContainerDied","Data":"1fb1de741ff3e96347bf57ebc5b60b94866a822c0ffb6bcd772186b27cd6853e"} Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.528168 4763 scope.go:117] "RemoveContainer" containerID="db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.551655 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.551686 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/379b90db-1a4a-49ca-90bc-57701808262d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.551698 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f72lv\" (UniqueName: \"kubernetes.io/projected/379b90db-1a4a-49ca-90bc-57701808262d-kube-api-access-f72lv\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.580199 4763 scope.go:117] "RemoveContainer" containerID="db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037" Dec 06 08:32:46 crc kubenswrapper[4763]: E1206 08:32:46.582696 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037\": container with ID starting with db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037 not found: ID does not exist" containerID="db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.582741 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037"} err="failed to get container status \"db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037\": rpc error: code = NotFound desc = could not find container \"db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037\": container with ID starting with db10ca4e075ccbe9db008fb36f72c6147990a3e7cf3708515fc9bfdb5efd3037 not found: ID does not exist" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.586220 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 
08:32:46.593926 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.603461 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 06 08:32:46 crc kubenswrapper[4763]: E1206 08:32:46.604055 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="379b90db-1a4a-49ca-90bc-57701808262d" containerName="nova-cell1-novncproxy-novncproxy" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.604077 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="379b90db-1a4a-49ca-90bc-57701808262d" containerName="nova-cell1-novncproxy-novncproxy" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.604360 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="379b90db-1a4a-49ca-90bc-57701808262d" containerName="nova-cell1-novncproxy-novncproxy" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.605183 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.608137 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.608342 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.608606 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.613030 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.756103 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.756495 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q75cq\" (UniqueName: \"kubernetes.io/projected/a8059381-d8b3-4ce5-9d33-3a973651b9b3-kube-api-access-q75cq\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.756623 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.756907 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.756964 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.859021 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.859092 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.859110 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q75cq\" (UniqueName: \"kubernetes.io/projected/a8059381-d8b3-4ce5-9d33-3a973651b9b3-kube-api-access-q75cq\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.859141 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.859161 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.863330 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.863348 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.863945 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.864910 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/a8059381-d8b3-4ce5-9d33-3a973651b9b3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.877454 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q75cq\" (UniqueName: \"kubernetes.io/projected/a8059381-d8b3-4ce5-9d33-3a973651b9b3-kube-api-access-q75cq\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8059381-d8b3-4ce5-9d33-3a973651b9b3\") " pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:46 crc kubenswrapper[4763]: I1206 08:32:46.925416 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:47 crc kubenswrapper[4763]: I1206 08:32:47.383873 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 06 08:32:47 crc kubenswrapper[4763]: I1206 08:32:47.538731 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a8059381-d8b3-4ce5-9d33-3a973651b9b3","Type":"ContainerStarted","Data":"24da390e17b99c26329213689440ccda2e2601fb19e238c515a40e74756b7064"} Dec 06 08:32:47 crc kubenswrapper[4763]: I1206 08:32:47.731662 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="379b90db-1a4a-49ca-90bc-57701808262d" path="/var/lib/kubelet/pods/379b90db-1a4a-49ca-90bc-57701808262d/volumes" Dec 06 08:32:48 crc kubenswrapper[4763]: I1206 08:32:48.546866 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a8059381-d8b3-4ce5-9d33-3a973651b9b3","Type":"ContainerStarted","Data":"5e4b9aa265b45917bcc387a3b3fb05d7ea26b2893f12166c656893ab15c8e559"} Dec 06 08:32:48 crc kubenswrapper[4763]: I1206 08:32:48.566886 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.566861147 podStartE2EDuration="2.566861147s" podCreationTimestamp="2025-12-06 08:32:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:48.561232046 +0000 UTC m=+1251.136937104" watchObservedRunningTime="2025-12-06 08:32:48.566861147 +0000 UTC m=+1251.142566185" Dec 06 08:32:48 crc kubenswrapper[4763]: I1206 08:32:48.673428 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 06 08:32:48 crc kubenswrapper[4763]: I1206 08:32:48.674064 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 06 08:32:48 crc kubenswrapper[4763]: I1206 08:32:48.674189 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 06 08:32:48 crc kubenswrapper[4763]: I1206 08:32:48.683334 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.557326 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.563418 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.755502 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84bd959b5-fbkvk"] Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 
08:32:49.763791 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.781463 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bd959b5-fbkvk"] Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.816502 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-swift-storage-0\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.816544 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-sb\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.816660 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79zm7\" (UniqueName: \"kubernetes.io/projected/c1ea635b-72f7-4abe-8996-76e8905177cf-kube-api-access-79zm7\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.816701 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-svc\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.816729 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-nb\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.816786 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-config\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.919163 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-swift-storage-0\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.919223 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-sb\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: 
I1206 08:32:49.919313 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79zm7\" (UniqueName: \"kubernetes.io/projected/c1ea635b-72f7-4abe-8996-76e8905177cf-kube-api-access-79zm7\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.919354 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-svc\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.919393 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-nb\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.919441 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-config\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.920404 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-config\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.922356 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-swift-storage-0\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.923005 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-sb\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.923972 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-svc\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.924618 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-nb\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:49 crc kubenswrapper[4763]: I1206 08:32:49.945446 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79zm7\" (UniqueName: 
\"kubernetes.io/projected/c1ea635b-72f7-4abe-8996-76e8905177cf-kube-api-access-79zm7\") pod \"dnsmasq-dns-84bd959b5-fbkvk\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:50 crc kubenswrapper[4763]: I1206 08:32:50.096501 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:50 crc kubenswrapper[4763]: I1206 08:32:50.603497 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bd959b5-fbkvk"] Dec 06 08:32:51 crc kubenswrapper[4763]: I1206 08:32:51.585202 4763 generic.go:334] "Generic (PLEG): container finished" podID="c1ea635b-72f7-4abe-8996-76e8905177cf" containerID="1360d2af19135ca400ca0675005b88f100843c408a4e679c8b9da5d04e233260" exitCode=0 Dec 06 08:32:51 crc kubenswrapper[4763]: I1206 08:32:51.585404 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" event={"ID":"c1ea635b-72f7-4abe-8996-76e8905177cf","Type":"ContainerDied","Data":"1360d2af19135ca400ca0675005b88f100843c408a4e679c8b9da5d04e233260"} Dec 06 08:32:51 crc kubenswrapper[4763]: I1206 08:32:51.586201 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" event={"ID":"c1ea635b-72f7-4abe-8996-76e8905177cf","Type":"ContainerStarted","Data":"8254b1427d03f98dbfe03fc46efd9fd5b9198a3fc6a31a7cc96764df48d3f1c4"} Dec 06 08:32:51 crc kubenswrapper[4763]: I1206 08:32:51.925487 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.089179 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.089739 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="ceilometer-central-agent" containerID="cri-o://820221b51ea1e461602886c5dd8f31a97627314c704eb323ff413fc48e7858ab" gracePeriod=30 Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.089826 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="proxy-httpd" containerID="cri-o://a9f6e2f74a8eef61a2bece0e2c49b2a6dc4e71dc982e2e231ca1db088a9cd4a8" gracePeriod=30 Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.089870 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="sg-core" containerID="cri-o://22bec48f316f01d16ca86d978b34ee86213c26e22f2d050665ecde25d93f82bf" gracePeriod=30 Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.089862 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="ceilometer-notification-agent" containerID="cri-o://2d8662c742bc20d9ad64c1b9af85d0d5102acae432575aa41c3e5a00d287af62" gracePeriod=30 Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.455262 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.597175 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" 
event={"ID":"c1ea635b-72f7-4abe-8996-76e8905177cf","Type":"ContainerStarted","Data":"e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac"} Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.597332 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.604111 4763 generic.go:334] "Generic (PLEG): container finished" podID="3f32364e-980f-467c-835a-4cd072176cb0" containerID="a9f6e2f74a8eef61a2bece0e2c49b2a6dc4e71dc982e2e231ca1db088a9cd4a8" exitCode=0 Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.604139 4763 generic.go:334] "Generic (PLEG): container finished" podID="3f32364e-980f-467c-835a-4cd072176cb0" containerID="22bec48f316f01d16ca86d978b34ee86213c26e22f2d050665ecde25d93f82bf" exitCode=2 Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.604212 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f32364e-980f-467c-835a-4cd072176cb0","Type":"ContainerDied","Data":"a9f6e2f74a8eef61a2bece0e2c49b2a6dc4e71dc982e2e231ca1db088a9cd4a8"} Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.604280 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f32364e-980f-467c-835a-4cd072176cb0","Type":"ContainerDied","Data":"22bec48f316f01d16ca86d978b34ee86213c26e22f2d050665ecde25d93f82bf"} Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.604350 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerName="nova-api-log" containerID="cri-o://78712858d9dfc50a6fd5f18615f2e55e7c66346cae5ad9c66e9530c0954b333d" gracePeriod=30 Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.604501 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerName="nova-api-api" containerID="cri-o://4d34f8bfdc78503c523060edb052dda91383637da49ba0acb0a50ca75ee585c7" gracePeriod=30 Dec 06 08:32:52 crc kubenswrapper[4763]: I1206 08:32:52.620391 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" podStartSLOduration=3.620375621 podStartE2EDuration="3.620375621s" podCreationTimestamp="2025-12-06 08:32:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:52.619597981 +0000 UTC m=+1255.195303009" watchObservedRunningTime="2025-12-06 08:32:52.620375621 +0000 UTC m=+1255.196080659" Dec 06 08:32:53 crc kubenswrapper[4763]: I1206 08:32:53.617316 4763 generic.go:334] "Generic (PLEG): container finished" podID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerID="78712858d9dfc50a6fd5f18615f2e55e7c66346cae5ad9c66e9530c0954b333d" exitCode=143 Dec 06 08:32:53 crc kubenswrapper[4763]: I1206 08:32:53.617506 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7526e3cd-76e3-4c19-8431-2d35553efd25","Type":"ContainerDied","Data":"78712858d9dfc50a6fd5f18615f2e55e7c66346cae5ad9c66e9530c0954b333d"} Dec 06 08:32:53 crc kubenswrapper[4763]: I1206 08:32:53.623316 4763 generic.go:334] "Generic (PLEG): container finished" podID="3f32364e-980f-467c-835a-4cd072176cb0" containerID="820221b51ea1e461602886c5dd8f31a97627314c704eb323ff413fc48e7858ab" exitCode=0 Dec 06 08:32:53 crc kubenswrapper[4763]: I1206 
08:32:53.624483 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f32364e-980f-467c-835a-4cd072176cb0","Type":"ContainerDied","Data":"820221b51ea1e461602886c5dd8f31a97627314c704eb323ff413fc48e7858ab"} Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.636796 4763 generic.go:334] "Generic (PLEG): container finished" podID="3f32364e-980f-467c-835a-4cd072176cb0" containerID="2d8662c742bc20d9ad64c1b9af85d0d5102acae432575aa41c3e5a00d287af62" exitCode=0 Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.636866 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f32364e-980f-467c-835a-4cd072176cb0","Type":"ContainerDied","Data":"2d8662c742bc20d9ad64c1b9af85d0d5102acae432575aa41c3e5a00d287af62"} Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.637401 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f32364e-980f-467c-835a-4cd072176cb0","Type":"ContainerDied","Data":"aba785997a2753b89b95be8e9a9290470da7ace77c8dd39d044f6ab9ebb0a19c"} Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.637420 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aba785997a2753b89b95be8e9a9290470da7ace77c8dd39d044f6ab9ebb0a19c" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.637098 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.640400 4763 generic.go:334] "Generic (PLEG): container finished" podID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerID="4d34f8bfdc78503c523060edb052dda91383637da49ba0acb0a50ca75ee585c7" exitCode=0 Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.640434 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7526e3cd-76e3-4c19-8431-2d35553efd25","Type":"ContainerDied","Data":"4d34f8bfdc78503c523060edb052dda91383637da49ba0acb0a50ca75ee585c7"} Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.729410 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-combined-ca-bundle\") pod \"3f32364e-980f-467c-835a-4cd072176cb0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.729554 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-ceilometer-tls-certs\") pod \"3f32364e-980f-467c-835a-4cd072176cb0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.729644 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm28v\" (UniqueName: \"kubernetes.io/projected/3f32364e-980f-467c-835a-4cd072176cb0-kube-api-access-dm28v\") pod \"3f32364e-980f-467c-835a-4cd072176cb0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.729715 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-log-httpd\") pod \"3f32364e-980f-467c-835a-4cd072176cb0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.729741 4763 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-scripts\") pod \"3f32364e-980f-467c-835a-4cd072176cb0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.729816 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-sg-core-conf-yaml\") pod \"3f32364e-980f-467c-835a-4cd072176cb0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.729886 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-config-data\") pod \"3f32364e-980f-467c-835a-4cd072176cb0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.729985 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-run-httpd\") pod \"3f32364e-980f-467c-835a-4cd072176cb0\" (UID: \"3f32364e-980f-467c-835a-4cd072176cb0\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.731118 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3f32364e-980f-467c-835a-4cd072176cb0" (UID: "3f32364e-980f-467c-835a-4cd072176cb0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.731704 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3f32364e-980f-467c-835a-4cd072176cb0" (UID: "3f32364e-980f-467c-835a-4cd072176cb0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.736079 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f32364e-980f-467c-835a-4cd072176cb0-kube-api-access-dm28v" (OuterVolumeSpecName: "kube-api-access-dm28v") pod "3f32364e-980f-467c-835a-4cd072176cb0" (UID: "3f32364e-980f-467c-835a-4cd072176cb0"). InnerVolumeSpecName "kube-api-access-dm28v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.736640 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-scripts" (OuterVolumeSpecName: "scripts") pod "3f32364e-980f-467c-835a-4cd072176cb0" (UID: "3f32364e-980f-467c-835a-4cd072176cb0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.751741 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.775430 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3f32364e-980f-467c-835a-4cd072176cb0" (UID: "3f32364e-980f-467c-835a-4cd072176cb0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.826663 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "3f32364e-980f-467c-835a-4cd072176cb0" (UID: "3f32364e-980f-467c-835a-4cd072176cb0"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.831643 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-config-data\") pod \"7526e3cd-76e3-4c19-8431-2d35553efd25\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.831745 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bwb8\" (UniqueName: \"kubernetes.io/projected/7526e3cd-76e3-4c19-8431-2d35553efd25-kube-api-access-4bwb8\") pod \"7526e3cd-76e3-4c19-8431-2d35553efd25\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.831891 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-combined-ca-bundle\") pod \"7526e3cd-76e3-4c19-8431-2d35553efd25\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.831978 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7526e3cd-76e3-4c19-8431-2d35553efd25-logs\") pod \"7526e3cd-76e3-4c19-8431-2d35553efd25\" (UID: \"7526e3cd-76e3-4c19-8431-2d35553efd25\") " Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.832354 4763 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.832365 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.832374 4763 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.832382 4763 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f32364e-980f-467c-835a-4cd072176cb0-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.832392 4763 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.832400 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm28v\" (UniqueName: \"kubernetes.io/projected/3f32364e-980f-467c-835a-4cd072176cb0-kube-api-access-dm28v\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.832783 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7526e3cd-76e3-4c19-8431-2d35553efd25-logs" (OuterVolumeSpecName: "logs") pod "7526e3cd-76e3-4c19-8431-2d35553efd25" (UID: "7526e3cd-76e3-4c19-8431-2d35553efd25"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.836177 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7526e3cd-76e3-4c19-8431-2d35553efd25-kube-api-access-4bwb8" (OuterVolumeSpecName: "kube-api-access-4bwb8") pod "7526e3cd-76e3-4c19-8431-2d35553efd25" (UID: "7526e3cd-76e3-4c19-8431-2d35553efd25"). InnerVolumeSpecName "kube-api-access-4bwb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.865052 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f32364e-980f-467c-835a-4cd072176cb0" (UID: "3f32364e-980f-467c-835a-4cd072176cb0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.881001 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-config-data" (OuterVolumeSpecName: "config-data") pod "7526e3cd-76e3-4c19-8431-2d35553efd25" (UID: "7526e3cd-76e3-4c19-8431-2d35553efd25"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.885532 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7526e3cd-76e3-4c19-8431-2d35553efd25" (UID: "7526e3cd-76e3-4c19-8431-2d35553efd25"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.885684 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-config-data" (OuterVolumeSpecName: "config-data") pod "3f32364e-980f-467c-835a-4cd072176cb0" (UID: "3f32364e-980f-467c-835a-4cd072176cb0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.934548 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.934579 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7526e3cd-76e3-4c19-8431-2d35553efd25-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.934590 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.935166 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7526e3cd-76e3-4c19-8431-2d35553efd25-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.935190 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f32364e-980f-467c-835a-4cd072176cb0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:54 crc kubenswrapper[4763]: I1206 08:32:54.935199 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bwb8\" (UniqueName: \"kubernetes.io/projected/7526e3cd-76e3-4c19-8431-2d35553efd25-kube-api-access-4bwb8\") on node \"crc\" DevicePath \"\"" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.655364 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.655373 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.655388 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7526e3cd-76e3-4c19-8431-2d35553efd25","Type":"ContainerDied","Data":"8c12f94a7c3825723377f22dbe03ecdebee6dd6af157fbcff72355c1947eae33"} Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.655835 4763 scope.go:117] "RemoveContainer" containerID="4d34f8bfdc78503c523060edb052dda91383637da49ba0acb0a50ca75ee585c7" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.696268 4763 scope.go:117] "RemoveContainer" containerID="78712858d9dfc50a6fd5f18615f2e55e7c66346cae5ad9c66e9530c0954b333d" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.701983 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.742003 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.742602 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.774047 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.788265 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:55 crc kubenswrapper[4763]: E1206 08:32:55.791235 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="sg-core" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791265 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="sg-core" Dec 06 08:32:55 crc kubenswrapper[4763]: E1206 08:32:55.791284 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="ceilometer-central-agent" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791291 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="ceilometer-central-agent" Dec 06 08:32:55 crc kubenswrapper[4763]: E1206 08:32:55.791304 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerName="nova-api-log" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791311 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerName="nova-api-log" Dec 06 08:32:55 crc kubenswrapper[4763]: E1206 08:32:55.791326 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerName="nova-api-api" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791332 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerName="nova-api-api" Dec 06 08:32:55 crc kubenswrapper[4763]: E1206 08:32:55.791344 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="proxy-httpd" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791350 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="proxy-httpd" Dec 06 08:32:55 crc kubenswrapper[4763]: E1206 08:32:55.791365 4763 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="ceilometer-notification-agent" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791370 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="ceilometer-notification-agent" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791588 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="ceilometer-notification-agent" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791602 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="ceilometer-central-agent" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791611 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="sg-core" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791628 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f32364e-980f-467c-835a-4cd072176cb0" containerName="proxy-httpd" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791634 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerName="nova-api-api" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.791650 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7526e3cd-76e3-4c19-8431-2d35553efd25" containerName="nova-api-log" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.793662 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.798267 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.798515 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.798669 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.798791 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.800879 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.804042 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.804074 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.804230 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.804350 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.825787 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.852238 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3c6e79b1-2945-4c24-918e-9a955cfae046-log-httpd\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.852499 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d978f0dd-2184-4133-a1c8-9df662a6b021-logs\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.852587 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.852842 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-config-data\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.852919 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.852975 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-644wv\" (UniqueName: \"kubernetes.io/projected/d978f0dd-2184-4133-a1c8-9df662a6b021-kube-api-access-644wv\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.853056 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.853133 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-scripts\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.853165 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3c6e79b1-2945-4c24-918e-9a955cfae046-run-httpd\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.853233 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24rnh\" (UniqueName: \"kubernetes.io/projected/3c6e79b1-2945-4c24-918e-9a955cfae046-kube-api-access-24rnh\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.853397 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-config-data\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.853441 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.853587 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.853748 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-public-tls-certs\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.955869 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.956067 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.956188 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-public-tls-certs\") pod \"nova-api-0\" (UID: 
\"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.956339 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3c6e79b1-2945-4c24-918e-9a955cfae046-log-httpd\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.956428 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d978f0dd-2184-4133-a1c8-9df662a6b021-logs\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.956501 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.956570 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-config-data\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.956637 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.956713 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-644wv\" (UniqueName: \"kubernetes.io/projected/d978f0dd-2184-4133-a1c8-9df662a6b021-kube-api-access-644wv\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.956802 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.956913 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-scripts\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.956993 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3c6e79b1-2945-4c24-918e-9a955cfae046-run-httpd\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.957075 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24rnh\" (UniqueName: \"kubernetes.io/projected/3c6e79b1-2945-4c24-918e-9a955cfae046-kube-api-access-24rnh\") pod \"ceilometer-0\" (UID: 
\"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.957166 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-config-data\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.958273 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d978f0dd-2184-4133-a1c8-9df662a6b021-logs\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.958643 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3c6e79b1-2945-4c24-918e-9a955cfae046-log-httpd\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.958872 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3c6e79b1-2945-4c24-918e-9a955cfae046-run-httpd\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.964767 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-public-tls-certs\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.965115 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-config-data\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.965501 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.966050 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-config-data\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.975592 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.996589 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:55 crc kubenswrapper[4763]: 
I1206 08:32:55.997161 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.997526 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:55 crc kubenswrapper[4763]: I1206 08:32:55.998173 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c6e79b1-2945-4c24-918e-9a955cfae046-scripts\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:56 crc kubenswrapper[4763]: I1206 08:32:56.017040 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-644wv\" (UniqueName: \"kubernetes.io/projected/d978f0dd-2184-4133-a1c8-9df662a6b021-kube-api-access-644wv\") pod \"nova-api-0\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " pod="openstack/nova-api-0" Dec 06 08:32:56 crc kubenswrapper[4763]: I1206 08:32:56.018655 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24rnh\" (UniqueName: \"kubernetes.io/projected/3c6e79b1-2945-4c24-918e-9a955cfae046-kube-api-access-24rnh\") pod \"ceilometer-0\" (UID: \"3c6e79b1-2945-4c24-918e-9a955cfae046\") " pod="openstack/ceilometer-0" Dec 06 08:32:56 crc kubenswrapper[4763]: I1206 08:32:56.133552 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 06 08:32:56 crc kubenswrapper[4763]: I1206 08:32:56.148175 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:32:56 crc kubenswrapper[4763]: W1206 08:32:56.610124 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c6e79b1_2945_4c24_918e_9a955cfae046.slice/crio-5dfb31071c31f179a4a8d325cc16fdf0d0f6292f6ce80efb085460b965bbff72 WatchSource:0}: Error finding container 5dfb31071c31f179a4a8d325cc16fdf0d0f6292f6ce80efb085460b965bbff72: Status 404 returned error can't find the container with id 5dfb31071c31f179a4a8d325cc16fdf0d0f6292f6ce80efb085460b965bbff72 Dec 06 08:32:56 crc kubenswrapper[4763]: I1206 08:32:56.619451 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 06 08:32:56 crc kubenswrapper[4763]: I1206 08:32:56.672561 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3c6e79b1-2945-4c24-918e-9a955cfae046","Type":"ContainerStarted","Data":"5dfb31071c31f179a4a8d325cc16fdf0d0f6292f6ce80efb085460b965bbff72"} Dec 06 08:32:56 crc kubenswrapper[4763]: I1206 08:32:56.751878 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:32:56 crc kubenswrapper[4763]: I1206 08:32:56.925725 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:56 crc kubenswrapper[4763]: I1206 08:32:56.947410 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.685175 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3c6e79b1-2945-4c24-918e-9a955cfae046","Type":"ContainerStarted","Data":"1cef66311ee0a4c248008254c2650811c89ee01abf09682a39d7ce671367183a"} Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.685544 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3c6e79b1-2945-4c24-918e-9a955cfae046","Type":"ContainerStarted","Data":"a9393d14f86c108783bd8416924cf88f956840f799029bc037582cd5ed6960af"} Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.689198 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d978f0dd-2184-4133-a1c8-9df662a6b021","Type":"ContainerStarted","Data":"1ed45084bd71c74db6c7f9bca52e87af7f665fb75e8d912f163df745827bce00"} Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.689263 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d978f0dd-2184-4133-a1c8-9df662a6b021","Type":"ContainerStarted","Data":"00d0c2682d170eb53fa923f03323b7dcfb3a65c245fe07d35cecfde60525c4fb"} Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.689280 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d978f0dd-2184-4133-a1c8-9df662a6b021","Type":"ContainerStarted","Data":"003cc00f09baffc90f26f9a6335145d743b72f6b38cf135ca25c740d133149e8"} Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.705433 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.714418 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.71439237 podStartE2EDuration="2.71439237s" podCreationTimestamp="2025-12-06 08:32:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:57.707085203 +0000 UTC m=+1260.282790251" watchObservedRunningTime="2025-12-06 08:32:57.71439237 +0000 UTC m=+1260.290097418" Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.753765 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f32364e-980f-467c-835a-4cd072176cb0" path="/var/lib/kubelet/pods/3f32364e-980f-467c-835a-4cd072176cb0/volumes" Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.754732 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7526e3cd-76e3-4c19-8431-2d35553efd25" path="/var/lib/kubelet/pods/7526e3cd-76e3-4c19-8431-2d35553efd25/volumes" Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.878481 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-2drns"] Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.880065 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.882739 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.883718 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 06 08:32:57 crc kubenswrapper[4763]: I1206 08:32:57.887999 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-2drns"] Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.012550 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-scripts\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.012601 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-config-data\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.012623 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.012737 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjs8k\" (UniqueName: \"kubernetes.io/projected/d95b1c06-d160-4443-82ab-80bc512a4fba-kube-api-access-bjs8k\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.118883 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-scripts\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " 
pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.119005 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-config-data\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.119045 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.119334 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjs8k\" (UniqueName: \"kubernetes.io/projected/d95b1c06-d160-4443-82ab-80bc512a4fba-kube-api-access-bjs8k\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.127259 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-config-data\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.127738 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-scripts\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.139601 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjs8k\" (UniqueName: \"kubernetes.io/projected/d95b1c06-d160-4443-82ab-80bc512a4fba-kube-api-access-bjs8k\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.141277 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2drns\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.318840 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.704288 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3c6e79b1-2945-4c24-918e-9a955cfae046","Type":"ContainerStarted","Data":"cc984c9766b1cea16bdd4a321b48eba5a2017236e59dd3a6371786620876796f"} Dec 06 08:32:58 crc kubenswrapper[4763]: I1206 08:32:58.790288 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-2drns"] Dec 06 08:32:59 crc kubenswrapper[4763]: I1206 08:32:59.733100 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3c6e79b1-2945-4c24-918e-9a955cfae046","Type":"ContainerStarted","Data":"259c995e89059014a0e7021a41807f733c8345db677701018a1f5d3ceab53e8d"} Dec 06 08:32:59 crc kubenswrapper[4763]: I1206 08:32:59.733808 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 06 08:32:59 crc kubenswrapper[4763]: I1206 08:32:59.734537 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2drns" event={"ID":"d95b1c06-d160-4443-82ab-80bc512a4fba","Type":"ContainerStarted","Data":"55b99a70902a11c8fbd76781ad65c120fadbffaff96c1b0b96932d1e5c3d2d43"} Dec 06 08:32:59 crc kubenswrapper[4763]: I1206 08:32:59.734560 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2drns" event={"ID":"d95b1c06-d160-4443-82ab-80bc512a4fba","Type":"ContainerStarted","Data":"8cc33a2d7c6799d2564acfabbb2a8e4c1766bee04c497e43cd55d78d4c6d473e"} Dec 06 08:32:59 crc kubenswrapper[4763]: I1206 08:32:59.751425 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.961293973 podStartE2EDuration="4.751406864s" podCreationTimestamp="2025-12-06 08:32:55 +0000 UTC" firstStartedPulling="2025-12-06 08:32:56.617160904 +0000 UTC m=+1259.192865942" lastFinishedPulling="2025-12-06 08:32:59.407273805 +0000 UTC m=+1261.982978833" observedRunningTime="2025-12-06 08:32:59.745946287 +0000 UTC m=+1262.321651345" watchObservedRunningTime="2025-12-06 08:32:59.751406864 +0000 UTC m=+1262.327111902" Dec 06 08:32:59 crc kubenswrapper[4763]: I1206 08:32:59.771285 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-2drns" podStartSLOduration=2.7712635199999998 podStartE2EDuration="2.77126352s" podCreationTimestamp="2025-12-06 08:32:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:32:59.769101152 +0000 UTC m=+1262.344806190" watchObservedRunningTime="2025-12-06 08:32:59.77126352 +0000 UTC m=+1262.346968578" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.099082 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.169087 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b7fb879d9-9gtrm"] Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.169312 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" podUID="2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" containerName="dnsmasq-dns" containerID="cri-o://f0712d5b17eff9cfa0b28eee4e4d3b4fde40c63ae551fa97facc50762002bb36" gracePeriod=10 Dec 06 08:33:00 crc 
kubenswrapper[4763]: I1206 08:33:00.745463 4763 generic.go:334] "Generic (PLEG): container finished" podID="2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" containerID="f0712d5b17eff9cfa0b28eee4e4d3b4fde40c63ae551fa97facc50762002bb36" exitCode=0 Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.745603 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" event={"ID":"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2","Type":"ContainerDied","Data":"f0712d5b17eff9cfa0b28eee4e4d3b4fde40c63ae551fa97facc50762002bb36"} Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.746059 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" event={"ID":"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2","Type":"ContainerDied","Data":"bd4ba51d25b39fbb0a1415a3879f8c90044d74bd67d2ee1f8b8289adf9d6bcb7"} Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.746080 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd4ba51d25b39fbb0a1415a3879f8c90044d74bd67d2ee1f8b8289adf9d6bcb7" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.781689 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.886975 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-nb\") pod \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.887014 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-svc\") pod \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.887052 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-sb\") pod \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.887077 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc6p6\" (UniqueName: \"kubernetes.io/projected/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-kube-api-access-fc6p6\") pod \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.887101 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-swift-storage-0\") pod \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.887156 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-config\") pod \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\" (UID: \"2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2\") " Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.896315 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-kube-api-access-fc6p6" (OuterVolumeSpecName: "kube-api-access-fc6p6") pod "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" (UID: "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2"). InnerVolumeSpecName "kube-api-access-fc6p6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.947828 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" (UID: "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.955503 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" (UID: "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.959009 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" (UID: "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.964467 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-config" (OuterVolumeSpecName: "config") pod "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" (UID: "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.966382 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" (UID: "2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.989304 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.989524 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.989598 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.989654 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.989709 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc6p6\" (UniqueName: \"kubernetes.io/projected/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-kube-api-access-fc6p6\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:00 crc kubenswrapper[4763]: I1206 08:33:00.989771 4763 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:01 crc kubenswrapper[4763]: I1206 08:33:01.754353 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b7fb879d9-9gtrm" Dec 06 08:33:01 crc kubenswrapper[4763]: I1206 08:33:01.782399 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b7fb879d9-9gtrm"] Dec 06 08:33:01 crc kubenswrapper[4763]: I1206 08:33:01.810637 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b7fb879d9-9gtrm"] Dec 06 08:33:03 crc kubenswrapper[4763]: I1206 08:33:03.733663 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" path="/var/lib/kubelet/pods/2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2/volumes" Dec 06 08:33:04 crc kubenswrapper[4763]: I1206 08:33:04.787165 4763 generic.go:334] "Generic (PLEG): container finished" podID="d95b1c06-d160-4443-82ab-80bc512a4fba" containerID="55b99a70902a11c8fbd76781ad65c120fadbffaff96c1b0b96932d1e5c3d2d43" exitCode=0 Dec 06 08:33:04 crc kubenswrapper[4763]: I1206 08:33:04.787213 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2drns" event={"ID":"d95b1c06-d160-4443-82ab-80bc512a4fba","Type":"ContainerDied","Data":"55b99a70902a11c8fbd76781ad65c120fadbffaff96c1b0b96932d1e5c3d2d43"} Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.149496 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.149842 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.176258 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.197734 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-config-data\") pod \"d95b1c06-d160-4443-82ab-80bc512a4fba\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.197766 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-combined-ca-bundle\") pod \"d95b1c06-d160-4443-82ab-80bc512a4fba\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.197881 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-scripts\") pod \"d95b1c06-d160-4443-82ab-80bc512a4fba\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.197929 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjs8k\" (UniqueName: \"kubernetes.io/projected/d95b1c06-d160-4443-82ab-80bc512a4fba-kube-api-access-bjs8k\") pod \"d95b1c06-d160-4443-82ab-80bc512a4fba\" (UID: \"d95b1c06-d160-4443-82ab-80bc512a4fba\") " Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.205831 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-scripts" (OuterVolumeSpecName: "scripts") pod "d95b1c06-d160-4443-82ab-80bc512a4fba" (UID: "d95b1c06-d160-4443-82ab-80bc512a4fba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.214664 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d95b1c06-d160-4443-82ab-80bc512a4fba-kube-api-access-bjs8k" (OuterVolumeSpecName: "kube-api-access-bjs8k") pod "d95b1c06-d160-4443-82ab-80bc512a4fba" (UID: "d95b1c06-d160-4443-82ab-80bc512a4fba"). InnerVolumeSpecName "kube-api-access-bjs8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.233724 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d95b1c06-d160-4443-82ab-80bc512a4fba" (UID: "d95b1c06-d160-4443-82ab-80bc512a4fba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.245731 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-config-data" (OuterVolumeSpecName: "config-data") pod "d95b1c06-d160-4443-82ab-80bc512a4fba" (UID: "d95b1c06-d160-4443-82ab-80bc512a4fba"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.301043 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.301087 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.301100 4763 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d95b1c06-d160-4443-82ab-80bc512a4fba-scripts\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.301109 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjs8k\" (UniqueName: \"kubernetes.io/projected/d95b1c06-d160-4443-82ab-80bc512a4fba-kube-api-access-bjs8k\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.847442 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2drns" event={"ID":"d95b1c06-d160-4443-82ab-80bc512a4fba","Type":"ContainerDied","Data":"8cc33a2d7c6799d2564acfabbb2a8e4c1766bee04c497e43cd55d78d4c6d473e"} Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.847975 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8cc33a2d7c6799d2564acfabbb2a8e4c1766bee04c497e43cd55d78d4c6d473e" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.847750 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2drns" Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.996627 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.996974 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d978f0dd-2184-4133-a1c8-9df662a6b021" containerName="nova-api-log" containerID="cri-o://00d0c2682d170eb53fa923f03323b7dcfb3a65c245fe07d35cecfde60525c4fb" gracePeriod=30 Dec 06 08:33:06 crc kubenswrapper[4763]: I1206 08:33:06.997077 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d978f0dd-2184-4133-a1c8-9df662a6b021" containerName="nova-api-api" containerID="cri-o://1ed45084bd71c74db6c7f9bca52e87af7f665fb75e8d912f163df745827bce00" gracePeriod=30 Dec 06 08:33:07 crc kubenswrapper[4763]: I1206 08:33:07.011064 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d978f0dd-2184-4133-a1c8-9df662a6b021" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.219:8774/\": EOF" Dec 06 08:33:07 crc kubenswrapper[4763]: I1206 08:33:07.011504 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:33:07 crc kubenswrapper[4763]: I1206 08:33:07.011846 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="036fe43a-e52b-4022-8a64-754e30e9c470" containerName="nova-scheduler-scheduler" containerID="cri-o://5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22" gracePeriod=30 Dec 06 08:33:07 crc kubenswrapper[4763]: I1206 08:33:07.029637 4763 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:33:07 crc kubenswrapper[4763]: I1206 08:33:07.029986 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerName="nova-metadata-log" containerID="cri-o://4b106e7e443b52ddc623381fc4726fba765334377aca316017e5a5caf2620225" gracePeriod=30 Dec 06 08:33:07 crc kubenswrapper[4763]: I1206 08:33:07.030156 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerName="nova-metadata-metadata" containerID="cri-o://5bcfcd5834b3a13a164ccf04fe2a086ca69c66f987aeef361d2445fcfb708ea0" gracePeriod=30 Dec 06 08:33:07 crc kubenswrapper[4763]: I1206 08:33:07.044520 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d978f0dd-2184-4133-a1c8-9df662a6b021" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.219:8774/\": EOF" Dec 06 08:33:07 crc kubenswrapper[4763]: I1206 08:33:07.862008 4763 generic.go:334] "Generic (PLEG): container finished" podID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerID="4b106e7e443b52ddc623381fc4726fba765334377aca316017e5a5caf2620225" exitCode=143 Dec 06 08:33:07 crc kubenswrapper[4763]: I1206 08:33:07.862089 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7c175d78-39a6-4a2c-a6a2-f702d982cf08","Type":"ContainerDied","Data":"4b106e7e443b52ddc623381fc4726fba765334377aca316017e5a5caf2620225"} Dec 06 08:33:07 crc kubenswrapper[4763]: I1206 08:33:07.863756 4763 generic.go:334] "Generic (PLEG): container finished" podID="d978f0dd-2184-4133-a1c8-9df662a6b021" containerID="00d0c2682d170eb53fa923f03323b7dcfb3a65c245fe07d35cecfde60525c4fb" exitCode=143 Dec 06 08:33:07 crc kubenswrapper[4763]: I1206 08:33:07.863782 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d978f0dd-2184-4133-a1c8-9df662a6b021","Type":"ContainerDied","Data":"00d0c2682d170eb53fa923f03323b7dcfb3a65c245fe07d35cecfde60525c4fb"} Dec 06 08:33:08 crc kubenswrapper[4763]: I1206 08:33:08.888626 4763 generic.go:334] "Generic (PLEG): container finished" podID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerID="5bcfcd5834b3a13a164ccf04fe2a086ca69c66f987aeef361d2445fcfb708ea0" exitCode=0 Dec 06 08:33:08 crc kubenswrapper[4763]: I1206 08:33:08.888708 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7c175d78-39a6-4a2c-a6a2-f702d982cf08","Type":"ContainerDied","Data":"5bcfcd5834b3a13a164ccf04fe2a086ca69c66f987aeef361d2445fcfb708ea0"} Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.134062 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.184319 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrkdj\" (UniqueName: \"kubernetes.io/projected/7c175d78-39a6-4a2c-a6a2-f702d982cf08-kube-api-access-nrkdj\") pod \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.184484 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c175d78-39a6-4a2c-a6a2-f702d982cf08-logs\") pod \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.184548 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-nova-metadata-tls-certs\") pod \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.184640 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-combined-ca-bundle\") pod \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.184785 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-config-data\") pod \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\" (UID: \"7c175d78-39a6-4a2c-a6a2-f702d982cf08\") " Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.186837 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c175d78-39a6-4a2c-a6a2-f702d982cf08-logs" (OuterVolumeSpecName: "logs") pod "7c175d78-39a6-4a2c-a6a2-f702d982cf08" (UID: "7c175d78-39a6-4a2c-a6a2-f702d982cf08"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.211204 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c175d78-39a6-4a2c-a6a2-f702d982cf08-kube-api-access-nrkdj" (OuterVolumeSpecName: "kube-api-access-nrkdj") pod "7c175d78-39a6-4a2c-a6a2-f702d982cf08" (UID: "7c175d78-39a6-4a2c-a6a2-f702d982cf08"). InnerVolumeSpecName "kube-api-access-nrkdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.241668 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-config-data" (OuterVolumeSpecName: "config-data") pod "7c175d78-39a6-4a2c-a6a2-f702d982cf08" (UID: "7c175d78-39a6-4a2c-a6a2-f702d982cf08"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.263142 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c175d78-39a6-4a2c-a6a2-f702d982cf08" (UID: "7c175d78-39a6-4a2c-a6a2-f702d982cf08"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.272643 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "7c175d78-39a6-4a2c-a6a2-f702d982cf08" (UID: "7c175d78-39a6-4a2c-a6a2-f702d982cf08"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.289211 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.289256 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrkdj\" (UniqueName: \"kubernetes.io/projected/7c175d78-39a6-4a2c-a6a2-f702d982cf08-kube-api-access-nrkdj\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.289269 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c175d78-39a6-4a2c-a6a2-f702d982cf08-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.289281 4763 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.289292 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c175d78-39a6-4a2c-a6a2-f702d982cf08-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:09 crc kubenswrapper[4763]: E1206 08:33:09.714033 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22 is running failed: container process not found" containerID="5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 06 08:33:09 crc kubenswrapper[4763]: E1206 08:33:09.714579 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22 is running failed: container process not found" containerID="5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 06 08:33:09 crc kubenswrapper[4763]: E1206 08:33:09.717077 4763 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22 is running failed: container process not found" containerID="5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 06 08:33:09 crc kubenswrapper[4763]: E1206 08:33:09.717155 4763 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22 is running failed: container process not found" 
probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="036fe43a-e52b-4022-8a64-754e30e9c470" containerName="nova-scheduler-scheduler" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.900504 4763 generic.go:334] "Generic (PLEG): container finished" podID="036fe43a-e52b-4022-8a64-754e30e9c470" containerID="5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22" exitCode=0 Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.900591 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"036fe43a-e52b-4022-8a64-754e30e9c470","Type":"ContainerDied","Data":"5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22"} Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.902342 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7c175d78-39a6-4a2c-a6a2-f702d982cf08","Type":"ContainerDied","Data":"b3b8cd58c42bf4b54ccbf78451265b09ad2c9246092471188944e2163566568d"} Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.902401 4763 scope.go:117] "RemoveContainer" containerID="5bcfcd5834b3a13a164ccf04fe2a086ca69c66f987aeef361d2445fcfb708ea0" Dec 06 08:33:09 crc kubenswrapper[4763]: I1206 08:33:09.902418 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.013869 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.016396 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-config-data\") pod \"036fe43a-e52b-4022-8a64-754e30e9c470\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.016468 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5vvg\" (UniqueName: \"kubernetes.io/projected/036fe43a-e52b-4022-8a64-754e30e9c470-kube-api-access-p5vvg\") pod \"036fe43a-e52b-4022-8a64-754e30e9c470\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.032111 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/036fe43a-e52b-4022-8a64-754e30e9c470-kube-api-access-p5vvg" (OuterVolumeSpecName: "kube-api-access-p5vvg") pod "036fe43a-e52b-4022-8a64-754e30e9c470" (UID: "036fe43a-e52b-4022-8a64-754e30e9c470"). InnerVolumeSpecName "kube-api-access-p5vvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.040463 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.042954 4763 scope.go:117] "RemoveContainer" containerID="4b106e7e443b52ddc623381fc4726fba765334377aca316017e5a5caf2620225" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.062820 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.084451 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.084539 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-config-data" (OuterVolumeSpecName: "config-data") pod "036fe43a-e52b-4022-8a64-754e30e9c470" (UID: "036fe43a-e52b-4022-8a64-754e30e9c470"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:10 crc kubenswrapper[4763]: E1206 08:33:10.098294 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerName="nova-metadata-metadata" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.098334 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerName="nova-metadata-metadata" Dec 06 08:33:10 crc kubenswrapper[4763]: E1206 08:33:10.098382 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" containerName="dnsmasq-dns" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.098389 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" containerName="dnsmasq-dns" Dec 06 08:33:10 crc kubenswrapper[4763]: E1206 08:33:10.098434 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" containerName="init" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.098440 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" containerName="init" Dec 06 08:33:10 crc kubenswrapper[4763]: E1206 08:33:10.098463 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d95b1c06-d160-4443-82ab-80bc512a4fba" containerName="nova-manage" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.098471 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d95b1c06-d160-4443-82ab-80bc512a4fba" containerName="nova-manage" Dec 06 08:33:10 crc kubenswrapper[4763]: E1206 08:33:10.098486 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="036fe43a-e52b-4022-8a64-754e30e9c470" containerName="nova-scheduler-scheduler" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.098492 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="036fe43a-e52b-4022-8a64-754e30e9c470" containerName="nova-scheduler-scheduler" Dec 06 08:33:10 crc kubenswrapper[4763]: E1206 08:33:10.098518 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerName="nova-metadata-log" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.098525 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerName="nova-metadata-log" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.099212 
4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="2126f6b2-12b1-493d-8e3e-e3fb5c7b3df2" containerName="dnsmasq-dns" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.099237 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="036fe43a-e52b-4022-8a64-754e30e9c470" containerName="nova-scheduler-scheduler" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.099256 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="d95b1c06-d160-4443-82ab-80bc512a4fba" containerName="nova-manage" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.099270 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerName="nova-metadata-log" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.099294 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" containerName="nova-metadata-metadata" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.113074 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.116171 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.116288 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.118993 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-combined-ca-bundle\") pod \"036fe43a-e52b-4022-8a64-754e30e9c470\" (UID: \"036fe43a-e52b-4022-8a64-754e30e9c470\") " Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.119506 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.120000 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db2b9f5c-c7d4-446f-a749-729c24b6ce44-config-data\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.120064 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db2b9f5c-c7d4-446f-a749-729c24b6ce44-logs\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.120208 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/db2b9f5c-c7d4-446f-a749-729c24b6ce44-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.120282 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqc2b\" (UniqueName: \"kubernetes.io/projected/db2b9f5c-c7d4-446f-a749-729c24b6ce44-kube-api-access-sqc2b\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc 
kubenswrapper[4763]: I1206 08:33:10.120313 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db2b9f5c-c7d4-446f-a749-729c24b6ce44-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.121105 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.121134 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5vvg\" (UniqueName: \"kubernetes.io/projected/036fe43a-e52b-4022-8a64-754e30e9c470-kube-api-access-p5vvg\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.172262 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "036fe43a-e52b-4022-8a64-754e30e9c470" (UID: "036fe43a-e52b-4022-8a64-754e30e9c470"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.222326 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/db2b9f5c-c7d4-446f-a749-729c24b6ce44-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.222397 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqc2b\" (UniqueName: \"kubernetes.io/projected/db2b9f5c-c7d4-446f-a749-729c24b6ce44-kube-api-access-sqc2b\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.222476 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db2b9f5c-c7d4-446f-a749-729c24b6ce44-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.223686 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db2b9f5c-c7d4-446f-a749-729c24b6ce44-config-data\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.223756 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db2b9f5c-c7d4-446f-a749-729c24b6ce44-logs\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.224105 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db2b9f5c-c7d4-446f-a749-729c24b6ce44-logs\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: 
I1206 08:33:10.224217 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/036fe43a-e52b-4022-8a64-754e30e9c470-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.226337 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db2b9f5c-c7d4-446f-a749-729c24b6ce44-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.226349 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/db2b9f5c-c7d4-446f-a749-729c24b6ce44-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.227024 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db2b9f5c-c7d4-446f-a749-729c24b6ce44-config-data\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.245138 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqc2b\" (UniqueName: \"kubernetes.io/projected/db2b9f5c-c7d4-446f-a749-729c24b6ce44-kube-api-access-sqc2b\") pod \"nova-metadata-0\" (UID: \"db2b9f5c-c7d4-446f-a749-729c24b6ce44\") " pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.456173 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.913225 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"036fe43a-e52b-4022-8a64-754e30e9c470","Type":"ContainerDied","Data":"4c2d467a42c694ed9a2e7fb4f28d3a0a8d4027ef58a93908b3520c02e6bfeaf7"} Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.913504 4763 scope.go:117] "RemoveContainer" containerID="5202e167bc2c4ce385d4092c2293b83e9f36cb165a8e7dd16d8af59523aa2d22" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.913286 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.949119 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.968290 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:33:10 crc kubenswrapper[4763]: I1206 08:33:10.994782 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.027079 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.029018 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.037371 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.041252 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a81419ed-3356-4a60-8bfa-b1cb2cfb5080-config-data\") pod \"nova-scheduler-0\" (UID: \"a81419ed-3356-4a60-8bfa-b1cb2cfb5080\") " pod="openstack/nova-scheduler-0" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.041355 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f99xw\" (UniqueName: \"kubernetes.io/projected/a81419ed-3356-4a60-8bfa-b1cb2cfb5080-kube-api-access-f99xw\") pod \"nova-scheduler-0\" (UID: \"a81419ed-3356-4a60-8bfa-b1cb2cfb5080\") " pod="openstack/nova-scheduler-0" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.041400 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a81419ed-3356-4a60-8bfa-b1cb2cfb5080-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a81419ed-3356-4a60-8bfa-b1cb2cfb5080\") " pod="openstack/nova-scheduler-0" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.044360 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.144590 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a81419ed-3356-4a60-8bfa-b1cb2cfb5080-config-data\") pod \"nova-scheduler-0\" (UID: \"a81419ed-3356-4a60-8bfa-b1cb2cfb5080\") " pod="openstack/nova-scheduler-0" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.144942 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f99xw\" (UniqueName: \"kubernetes.io/projected/a81419ed-3356-4a60-8bfa-b1cb2cfb5080-kube-api-access-f99xw\") pod \"nova-scheduler-0\" (UID: \"a81419ed-3356-4a60-8bfa-b1cb2cfb5080\") " pod="openstack/nova-scheduler-0" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.145065 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a81419ed-3356-4a60-8bfa-b1cb2cfb5080-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a81419ed-3356-4a60-8bfa-b1cb2cfb5080\") " pod="openstack/nova-scheduler-0" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.154300 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a81419ed-3356-4a60-8bfa-b1cb2cfb5080-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a81419ed-3356-4a60-8bfa-b1cb2cfb5080\") " pod="openstack/nova-scheduler-0" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.157537 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a81419ed-3356-4a60-8bfa-b1cb2cfb5080-config-data\") pod \"nova-scheduler-0\" (UID: \"a81419ed-3356-4a60-8bfa-b1cb2cfb5080\") " pod="openstack/nova-scheduler-0" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.171756 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f99xw\" (UniqueName: 
\"kubernetes.io/projected/a81419ed-3356-4a60-8bfa-b1cb2cfb5080-kube-api-access-f99xw\") pod \"nova-scheduler-0\" (UID: \"a81419ed-3356-4a60-8bfa-b1cb2cfb5080\") " pod="openstack/nova-scheduler-0" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.413559 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.731527 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="036fe43a-e52b-4022-8a64-754e30e9c470" path="/var/lib/kubelet/pods/036fe43a-e52b-4022-8a64-754e30e9c470/volumes" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.732404 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c175d78-39a6-4a2c-a6a2-f702d982cf08" path="/var/lib/kubelet/pods/7c175d78-39a6-4a2c-a6a2-f702d982cf08/volumes" Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.839590 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 06 08:33:11 crc kubenswrapper[4763]: W1206 08:33:11.844494 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda81419ed_3356_4a60_8bfa_b1cb2cfb5080.slice/crio-4287adfc3069fef6804f5c07b64be0499196d63c3a0ec91c4bee20352f9b29e9 WatchSource:0}: Error finding container 4287adfc3069fef6804f5c07b64be0499196d63c3a0ec91c4bee20352f9b29e9: Status 404 returned error can't find the container with id 4287adfc3069fef6804f5c07b64be0499196d63c3a0ec91c4bee20352f9b29e9 Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.929477 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"db2b9f5c-c7d4-446f-a749-729c24b6ce44","Type":"ContainerStarted","Data":"ec30e599893bc09a1fa58ddea7b43d3cd94bf3dd8ca81e9e544150da4dff0172"} Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.929652 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"db2b9f5c-c7d4-446f-a749-729c24b6ce44","Type":"ContainerStarted","Data":"f241ad906ff559aef92555f2a94979e7d7f786b07607e0244594035d3bedd10b"} Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.929669 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"db2b9f5c-c7d4-446f-a749-729c24b6ce44","Type":"ContainerStarted","Data":"6c4dcf4b44ad492a60f1b79b345493d91bc3f44f6901cb2837bc9a8ebe42300e"} Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.935189 4763 generic.go:334] "Generic (PLEG): container finished" podID="d978f0dd-2184-4133-a1c8-9df662a6b021" containerID="1ed45084bd71c74db6c7f9bca52e87af7f665fb75e8d912f163df745827bce00" exitCode=0 Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.935240 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d978f0dd-2184-4133-a1c8-9df662a6b021","Type":"ContainerDied","Data":"1ed45084bd71c74db6c7f9bca52e87af7f665fb75e8d912f163df745827bce00"} Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.936409 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a81419ed-3356-4a60-8bfa-b1cb2cfb5080","Type":"ContainerStarted","Data":"4287adfc3069fef6804f5c07b64be0499196d63c3a0ec91c4bee20352f9b29e9"} Dec 06 08:33:11 crc kubenswrapper[4763]: I1206 08:33:11.964618 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.9645991569999999 
podStartE2EDuration="1.964599157s" podCreationTimestamp="2025-12-06 08:33:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:33:11.953832866 +0000 UTC m=+1274.529537924" watchObservedRunningTime="2025-12-06 08:33:11.964599157 +0000 UTC m=+1274.540304195" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.318737 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.479311 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d978f0dd-2184-4133-a1c8-9df662a6b021-logs\") pod \"d978f0dd-2184-4133-a1c8-9df662a6b021\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.479412 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-internal-tls-certs\") pod \"d978f0dd-2184-4133-a1c8-9df662a6b021\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.479857 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d978f0dd-2184-4133-a1c8-9df662a6b021-logs" (OuterVolumeSpecName: "logs") pod "d978f0dd-2184-4133-a1c8-9df662a6b021" (UID: "d978f0dd-2184-4133-a1c8-9df662a6b021"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.480184 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-public-tls-certs\") pod \"d978f0dd-2184-4133-a1c8-9df662a6b021\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.480229 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-644wv\" (UniqueName: \"kubernetes.io/projected/d978f0dd-2184-4133-a1c8-9df662a6b021-kube-api-access-644wv\") pod \"d978f0dd-2184-4133-a1c8-9df662a6b021\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.480299 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-combined-ca-bundle\") pod \"d978f0dd-2184-4133-a1c8-9df662a6b021\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.480403 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-config-data\") pod \"d978f0dd-2184-4133-a1c8-9df662a6b021\" (UID: \"d978f0dd-2184-4133-a1c8-9df662a6b021\") " Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.481049 4763 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d978f0dd-2184-4133-a1c8-9df662a6b021-logs\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.504225 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d978f0dd-2184-4133-a1c8-9df662a6b021-kube-api-access-644wv" 
(OuterVolumeSpecName: "kube-api-access-644wv") pod "d978f0dd-2184-4133-a1c8-9df662a6b021" (UID: "d978f0dd-2184-4133-a1c8-9df662a6b021"). InnerVolumeSpecName "kube-api-access-644wv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.577105 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-config-data" (OuterVolumeSpecName: "config-data") pod "d978f0dd-2184-4133-a1c8-9df662a6b021" (UID: "d978f0dd-2184-4133-a1c8-9df662a6b021"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.602552 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-644wv\" (UniqueName: \"kubernetes.io/projected/d978f0dd-2184-4133-a1c8-9df662a6b021-kube-api-access-644wv\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.602575 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.618051 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d978f0dd-2184-4133-a1c8-9df662a6b021" (UID: "d978f0dd-2184-4133-a1c8-9df662a6b021"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.671132 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d978f0dd-2184-4133-a1c8-9df662a6b021" (UID: "d978f0dd-2184-4133-a1c8-9df662a6b021"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.678765 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d978f0dd-2184-4133-a1c8-9df662a6b021" (UID: "d978f0dd-2184-4133-a1c8-9df662a6b021"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.704734 4763 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.704768 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.704781 4763 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d978f0dd-2184-4133-a1c8-9df662a6b021-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.949623 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d978f0dd-2184-4133-a1c8-9df662a6b021","Type":"ContainerDied","Data":"003cc00f09baffc90f26f9a6335145d743b72f6b38cf135ca25c740d133149e8"} Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.949675 4763 scope.go:117] "RemoveContainer" containerID="1ed45084bd71c74db6c7f9bca52e87af7f665fb75e8d912f163df745827bce00" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.949745 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.951076 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a81419ed-3356-4a60-8bfa-b1cb2cfb5080","Type":"ContainerStarted","Data":"1b593731d8e634a8f3505be138f6fa651ff134e9c9f843bc6aafb89e53df8fb2"} Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.972872 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.972854451 podStartE2EDuration="2.972854451s" podCreationTimestamp="2025-12-06 08:33:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:33:12.966429659 +0000 UTC m=+1275.542134697" watchObservedRunningTime="2025-12-06 08:33:12.972854451 +0000 UTC m=+1275.548559489" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.977739 4763 scope.go:117] "RemoveContainer" containerID="00d0c2682d170eb53fa923f03323b7dcfb3a65c245fe07d35cecfde60525c4fb" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:12.999070 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.014687 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.038759 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 06 08:33:13 crc kubenswrapper[4763]: E1206 08:33:13.039296 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d978f0dd-2184-4133-a1c8-9df662a6b021" containerName="nova-api-log" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.039310 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d978f0dd-2184-4133-a1c8-9df662a6b021" containerName="nova-api-log" Dec 06 08:33:13 crc kubenswrapper[4763]: E1206 08:33:13.039329 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d978f0dd-2184-4133-a1c8-9df662a6b021" 
containerName="nova-api-api" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.039335 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d978f0dd-2184-4133-a1c8-9df662a6b021" containerName="nova-api-api" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.039519 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="d978f0dd-2184-4133-a1c8-9df662a6b021" containerName="nova-api-log" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.039532 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="d978f0dd-2184-4133-a1c8-9df662a6b021" containerName="nova-api-api" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.040653 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.049164 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.051613 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.051831 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.051848 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.111953 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-config-data\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.112044 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/781aaaf4-b6c0-4c1e-be97-725cd631e120-logs\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.112074 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.112088 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-internal-tls-certs\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.112396 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb5h4\" (UniqueName: \"kubernetes.io/projected/781aaaf4-b6c0-4c1e-be97-725cd631e120-kube-api-access-jb5h4\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.112442 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-public-tls-certs\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.214329 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb5h4\" (UniqueName: \"kubernetes.io/projected/781aaaf4-b6c0-4c1e-be97-725cd631e120-kube-api-access-jb5h4\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.214404 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-public-tls-certs\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.214457 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-config-data\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.214577 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/781aaaf4-b6c0-4c1e-be97-725cd631e120-logs\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.214616 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.214630 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-internal-tls-certs\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.216194 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/781aaaf4-b6c0-4c1e-be97-725cd631e120-logs\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.221847 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-config-data\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.222346 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-public-tls-certs\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.224219 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-combined-ca-bundle\") pod 
\"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.231625 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/781aaaf4-b6c0-4c1e-be97-725cd631e120-internal-tls-certs\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.237866 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jb5h4\" (UniqueName: \"kubernetes.io/projected/781aaaf4-b6c0-4c1e-be97-725cd631e120-kube-api-access-jb5h4\") pod \"nova-api-0\" (UID: \"781aaaf4-b6c0-4c1e-be97-725cd631e120\") " pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.365376 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.742554 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d978f0dd-2184-4133-a1c8-9df662a6b021" path="/var/lib/kubelet/pods/d978f0dd-2184-4133-a1c8-9df662a6b021/volumes" Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.806208 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 06 08:33:13 crc kubenswrapper[4763]: W1206 08:33:13.815451 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod781aaaf4_b6c0_4c1e_be97_725cd631e120.slice/crio-b52ca11b8ca97740acd54c6ae09f625e187213df32ea3cdd90b28de73f77e1e9 WatchSource:0}: Error finding container b52ca11b8ca97740acd54c6ae09f625e187213df32ea3cdd90b28de73f77e1e9: Status 404 returned error can't find the container with id b52ca11b8ca97740acd54c6ae09f625e187213df32ea3cdd90b28de73f77e1e9 Dec 06 08:33:13 crc kubenswrapper[4763]: I1206 08:33:13.962803 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"781aaaf4-b6c0-4c1e-be97-725cd631e120","Type":"ContainerStarted","Data":"b52ca11b8ca97740acd54c6ae09f625e187213df32ea3cdd90b28de73f77e1e9"} Dec 06 08:33:14 crc kubenswrapper[4763]: I1206 08:33:14.974345 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"781aaaf4-b6c0-4c1e-be97-725cd631e120","Type":"ContainerStarted","Data":"1ec7e30d1aedcf0806e54faa37b645cc37ad9d15b5f848203886765ba9cbcd36"} Dec 06 08:33:14 crc kubenswrapper[4763]: I1206 08:33:14.974718 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"781aaaf4-b6c0-4c1e-be97-725cd631e120","Type":"ContainerStarted","Data":"fa46c53f06154bc1c09ba561fff0e8b9f33be97922d424204df42dd6307ce687"} Dec 06 08:33:15 crc kubenswrapper[4763]: I1206 08:33:15.003010 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.002986849 podStartE2EDuration="3.002986849s" podCreationTimestamp="2025-12-06 08:33:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:33:14.99559755 +0000 UTC m=+1277.571302598" watchObservedRunningTime="2025-12-06 08:33:15.002986849 +0000 UTC m=+1277.578691897" Dec 06 08:33:15 crc kubenswrapper[4763]: I1206 08:33:15.456403 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 06 08:33:15 crc kubenswrapper[4763]: 
I1206 08:33:15.456611 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 06 08:33:16 crc kubenswrapper[4763]: I1206 08:33:16.414109 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 06 08:33:20 crc kubenswrapper[4763]: I1206 08:33:20.456373 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 06 08:33:20 crc kubenswrapper[4763]: I1206 08:33:20.457865 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 06 08:33:21 crc kubenswrapper[4763]: I1206 08:33:21.414291 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 06 08:33:21 crc kubenswrapper[4763]: I1206 08:33:21.448707 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 06 08:33:21 crc kubenswrapper[4763]: I1206 08:33:21.469814 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="db2b9f5c-c7d4-446f-a749-729c24b6ce44" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 06 08:33:21 crc kubenswrapper[4763]: I1206 08:33:21.469837 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="db2b9f5c-c7d4-446f-a749-729c24b6ce44" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 06 08:33:22 crc kubenswrapper[4763]: I1206 08:33:22.074448 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 06 08:33:23 crc kubenswrapper[4763]: I1206 08:33:23.367289 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 06 08:33:23 crc kubenswrapper[4763]: I1206 08:33:23.367364 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 06 08:33:24 crc kubenswrapper[4763]: I1206 08:33:24.383742 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="781aaaf4-b6c0-4c1e-be97-725cd631e120" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.223:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 06 08:33:24 crc kubenswrapper[4763]: I1206 08:33:24.383768 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="781aaaf4-b6c0-4c1e-be97-725cd631e120" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.223:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 06 08:33:26 crc kubenswrapper[4763]: I1206 08:33:26.147485 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 06 08:33:27 crc kubenswrapper[4763]: I1206 08:33:27.963331 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zdfvp"] Dec 06 08:33:27 crc kubenswrapper[4763]: I1206 08:33:27.978205 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.031855 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zdfvp"] Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.151531 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6dpm\" (UniqueName: \"kubernetes.io/projected/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-kube-api-access-r6dpm\") pod \"redhat-operators-zdfvp\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.151734 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-catalog-content\") pod \"redhat-operators-zdfvp\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.151865 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-utilities\") pod \"redhat-operators-zdfvp\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.254609 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-utilities\") pod \"redhat-operators-zdfvp\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.255311 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-utilities\") pod \"redhat-operators-zdfvp\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.255658 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6dpm\" (UniqueName: \"kubernetes.io/projected/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-kube-api-access-r6dpm\") pod \"redhat-operators-zdfvp\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.256164 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-catalog-content\") pod \"redhat-operators-zdfvp\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.256609 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-catalog-content\") pod \"redhat-operators-zdfvp\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.288831 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-r6dpm\" (UniqueName: \"kubernetes.io/projected/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-kube-api-access-r6dpm\") pod \"redhat-operators-zdfvp\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.360216 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:28 crc kubenswrapper[4763]: I1206 08:33:28.881640 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zdfvp"] Dec 06 08:33:29 crc kubenswrapper[4763]: I1206 08:33:29.137877 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdfvp" event={"ID":"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8","Type":"ContainerStarted","Data":"18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973"} Dec 06 08:33:29 crc kubenswrapper[4763]: I1206 08:33:29.137947 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdfvp" event={"ID":"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8","Type":"ContainerStarted","Data":"78a9f4e2817e164ab9aeaccca65dc755853a50c8d4318bb7d4d9c787b4e6fd5d"} Dec 06 08:33:30 crc kubenswrapper[4763]: I1206 08:33:30.147221 4763 generic.go:334] "Generic (PLEG): container finished" podID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" containerID="18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973" exitCode=0 Dec 06 08:33:30 crc kubenswrapper[4763]: I1206 08:33:30.147283 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdfvp" event={"ID":"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8","Type":"ContainerDied","Data":"18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973"} Dec 06 08:33:30 crc kubenswrapper[4763]: I1206 08:33:30.461753 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 06 08:33:30 crc kubenswrapper[4763]: I1206 08:33:30.461835 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 06 08:33:30 crc kubenswrapper[4763]: I1206 08:33:30.466469 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 06 08:33:30 crc kubenswrapper[4763]: I1206 08:33:30.469247 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 06 08:33:31 crc kubenswrapper[4763]: I1206 08:33:31.160366 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdfvp" event={"ID":"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8","Type":"ContainerStarted","Data":"ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d"} Dec 06 08:33:33 crc kubenswrapper[4763]: I1206 08:33:33.185687 4763 generic.go:334] "Generic (PLEG): container finished" podID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" containerID="ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d" exitCode=0 Dec 06 08:33:33 crc kubenswrapper[4763]: I1206 08:33:33.185772 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdfvp" event={"ID":"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8","Type":"ContainerDied","Data":"ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d"} Dec 06 08:33:33 crc kubenswrapper[4763]: I1206 08:33:33.380573 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/nova-api-0" Dec 06 08:33:33 crc kubenswrapper[4763]: I1206 08:33:33.381176 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 06 08:33:33 crc kubenswrapper[4763]: I1206 08:33:33.383753 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 06 08:33:33 crc kubenswrapper[4763]: I1206 08:33:33.394237 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 06 08:33:34 crc kubenswrapper[4763]: I1206 08:33:34.195933 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 06 08:33:34 crc kubenswrapper[4763]: I1206 08:33:34.204976 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 06 08:33:35 crc kubenswrapper[4763]: I1206 08:33:35.207563 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdfvp" event={"ID":"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8","Type":"ContainerStarted","Data":"3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8"} Dec 06 08:33:35 crc kubenswrapper[4763]: I1206 08:33:35.229618 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zdfvp" podStartSLOduration=4.340918027 podStartE2EDuration="8.229600583s" podCreationTimestamp="2025-12-06 08:33:27 +0000 UTC" firstStartedPulling="2025-12-06 08:33:30.149879579 +0000 UTC m=+1292.725584637" lastFinishedPulling="2025-12-06 08:33:34.038562155 +0000 UTC m=+1296.614267193" observedRunningTime="2025-12-06 08:33:35.229243693 +0000 UTC m=+1297.804948731" watchObservedRunningTime="2025-12-06 08:33:35.229600583 +0000 UTC m=+1297.805305621" Dec 06 08:33:38 crc kubenswrapper[4763]: I1206 08:33:38.360764 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:38 crc kubenswrapper[4763]: I1206 08:33:38.361317 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:39 crc kubenswrapper[4763]: I1206 08:33:39.415861 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zdfvp" podUID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" containerName="registry-server" probeResult="failure" output=< Dec 06 08:33:39 crc kubenswrapper[4763]: timeout: failed to connect service ":50051" within 1s Dec 06 08:33:39 crc kubenswrapper[4763]: > Dec 06 08:33:42 crc kubenswrapper[4763]: I1206 08:33:42.537119 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:33:42 crc kubenswrapper[4763]: I1206 08:33:42.537717 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:33:43 crc kubenswrapper[4763]: I1206 08:33:43.201459 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 06 08:33:44 crc kubenswrapper[4763]: I1206 08:33:44.212263 4763 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 06 08:33:46 crc kubenswrapper[4763]: I1206 08:33:46.880887 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="95ce87d2-e5c0-41f4-948a-e78e26077c91" containerName="rabbitmq" containerID="cri-o://ae5bc594ad1b99faf621d1efacc68bfb3ff0154d9cc21ebf49cdbf64ae13b156" gracePeriod=604797 Dec 06 08:33:47 crc kubenswrapper[4763]: I1206 08:33:47.805994 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" containerName="rabbitmq" containerID="cri-o://92e7ea483e4f925d1daa0f9bfaf84cc0536f9ae19fb35ee4c514d7710912b8a5" gracePeriod=604797 Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.355113 4763 generic.go:334] "Generic (PLEG): container finished" podID="95ce87d2-e5c0-41f4-948a-e78e26077c91" containerID="ae5bc594ad1b99faf621d1efacc68bfb3ff0154d9cc21ebf49cdbf64ae13b156" exitCode=0 Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.355198 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"95ce87d2-e5c0-41f4-948a-e78e26077c91","Type":"ContainerDied","Data":"ae5bc594ad1b99faf621d1efacc68bfb3ff0154d9cc21ebf49cdbf64ae13b156"} Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.426874 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.455967 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.484364 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.596761 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-plugins-conf\") pod \"95ce87d2-e5c0-41f4-948a-e78e26077c91\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.596832 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-server-conf\") pod \"95ce87d2-e5c0-41f4-948a-e78e26077c91\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.596866 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"95ce87d2-e5c0-41f4-948a-e78e26077c91\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.596982 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-confd\") pod \"95ce87d2-e5c0-41f4-948a-e78e26077c91\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.597129 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/95ce87d2-e5c0-41f4-948a-e78e26077c91-erlang-cookie-secret\") pod 
\"95ce87d2-e5c0-41f4-948a-e78e26077c91\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.597158 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-tls\") pod \"95ce87d2-e5c0-41f4-948a-e78e26077c91\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.597226 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/95ce87d2-e5c0-41f4-948a-e78e26077c91-pod-info\") pod \"95ce87d2-e5c0-41f4-948a-e78e26077c91\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.597258 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-plugins\") pod \"95ce87d2-e5c0-41f4-948a-e78e26077c91\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.597297 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqdsb\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-kube-api-access-nqdsb\") pod \"95ce87d2-e5c0-41f4-948a-e78e26077c91\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.597375 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-erlang-cookie\") pod \"95ce87d2-e5c0-41f4-948a-e78e26077c91\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.597425 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-config-data\") pod \"95ce87d2-e5c0-41f4-948a-e78e26077c91\" (UID: \"95ce87d2-e5c0-41f4-948a-e78e26077c91\") " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.598211 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "95ce87d2-e5c0-41f4-948a-e78e26077c91" (UID: "95ce87d2-e5c0-41f4-948a-e78e26077c91"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.598742 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "95ce87d2-e5c0-41f4-948a-e78e26077c91" (UID: "95ce87d2-e5c0-41f4-948a-e78e26077c91"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.599430 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "95ce87d2-e5c0-41f4-948a-e78e26077c91" (UID: "95ce87d2-e5c0-41f4-948a-e78e26077c91"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.607065 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-kube-api-access-nqdsb" (OuterVolumeSpecName: "kube-api-access-nqdsb") pod "95ce87d2-e5c0-41f4-948a-e78e26077c91" (UID: "95ce87d2-e5c0-41f4-948a-e78e26077c91"). InnerVolumeSpecName "kube-api-access-nqdsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.608813 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "95ce87d2-e5c0-41f4-948a-e78e26077c91" (UID: "95ce87d2-e5c0-41f4-948a-e78e26077c91"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.610146 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "95ce87d2-e5c0-41f4-948a-e78e26077c91" (UID: "95ce87d2-e5c0-41f4-948a-e78e26077c91"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.616768 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95ce87d2-e5c0-41f4-948a-e78e26077c91-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "95ce87d2-e5c0-41f4-948a-e78e26077c91" (UID: "95ce87d2-e5c0-41f4-948a-e78e26077c91"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.623590 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/95ce87d2-e5c0-41f4-948a-e78e26077c91-pod-info" (OuterVolumeSpecName: "pod-info") pod "95ce87d2-e5c0-41f4-948a-e78e26077c91" (UID: "95ce87d2-e5c0-41f4-948a-e78e26077c91"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.652476 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-config-data" (OuterVolumeSpecName: "config-data") pod "95ce87d2-e5c0-41f4-948a-e78e26077c91" (UID: "95ce87d2-e5c0-41f4-948a-e78e26077c91"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.673101 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zdfvp"] Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.700321 4763 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/95ce87d2-e5c0-41f4-948a-e78e26077c91-pod-info\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.700346 4763 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.700356 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqdsb\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-kube-api-access-nqdsb\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.700366 4763 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.700374 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.700384 4763 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.700410 4763 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.700421 4763 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/95ce87d2-e5c0-41f4-948a-e78e26077c91-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.700430 4763 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.702176 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-server-conf" (OuterVolumeSpecName: "server-conf") pod "95ce87d2-e5c0-41f4-948a-e78e26077c91" (UID: "95ce87d2-e5c0-41f4-948a-e78e26077c91"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.739104 4763 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.796943 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "95ce87d2-e5c0-41f4-948a-e78e26077c91" (UID: "95ce87d2-e5c0-41f4-948a-e78e26077c91"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.801665 4763 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/95ce87d2-e5c0-41f4-948a-e78e26077c91-server-conf\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.801753 4763 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:48 crc kubenswrapper[4763]: I1206 08:33:48.801808 4763 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/95ce87d2-e5c0-41f4-948a-e78e26077c91-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.241055 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: connect: connection refused" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.367301 4763 generic.go:334] "Generic (PLEG): container finished" podID="2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" containerID="92e7ea483e4f925d1daa0f9bfaf84cc0536f9ae19fb35ee4c514d7710912b8a5" exitCode=0 Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.367458 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9","Type":"ContainerDied","Data":"92e7ea483e4f925d1daa0f9bfaf84cc0536f9ae19fb35ee4c514d7710912b8a5"} Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.371639 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.371794 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"95ce87d2-e5c0-41f4-948a-e78e26077c91","Type":"ContainerDied","Data":"1cd3949dc76d0f52c87272fc7b725ba28277a33dfbba08824cd1bed69d1d868b"} Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.371843 4763 scope.go:117] "RemoveContainer" containerID="ae5bc594ad1b99faf621d1efacc68bfb3ff0154d9cc21ebf49cdbf64ae13b156" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.439646 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.449585 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.472209 4763 scope.go:117] "RemoveContainer" containerID="3b79be3dce95fdf97dc1243a8fffaa1bba79366e8c1a443fffc8ef3b3a25f3f0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.500218 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 06 08:33:49 crc kubenswrapper[4763]: E1206 08:33:49.501132 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95ce87d2-e5c0-41f4-948a-e78e26077c91" containerName="setup-container" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.501156 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="95ce87d2-e5c0-41f4-948a-e78e26077c91" containerName="setup-container" Dec 06 08:33:49 crc kubenswrapper[4763]: E1206 08:33:49.501228 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95ce87d2-e5c0-41f4-948a-e78e26077c91" containerName="rabbitmq" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.501238 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="95ce87d2-e5c0-41f4-948a-e78e26077c91" containerName="rabbitmq" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.503085 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="95ce87d2-e5c0-41f4-948a-e78e26077c91" containerName="rabbitmq" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.509305 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.516390 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.522526 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.522722 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.522828 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.522973 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.523091 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.523175 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-nhvp8" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.546836 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.630115 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.630172 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54a3c00e-d725-43d6-8afa-1a013c737071-pod-info\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.630268 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54a3c00e-d725-43d6-8afa-1a013c737071-config-data\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.630317 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.630337 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54a3c00e-d725-43d6-8afa-1a013c737071-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.630361 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5grxg\" (UniqueName: 
\"kubernetes.io/projected/54a3c00e-d725-43d6-8afa-1a013c737071-kube-api-access-5grxg\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.630423 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.630464 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54a3c00e-d725-43d6-8afa-1a013c737071-server-conf\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.630515 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.630615 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54a3c00e-d725-43d6-8afa-1a013c737071-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.630785 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.639872 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.734607 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-plugins\") pod \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.734685 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-plugins-conf\") pod \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.734795 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlvjq\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-kube-api-access-vlvjq\") pod \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.734841 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-erlang-cookie\") pod \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.734871 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-pod-info\") pod \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.734984 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-server-conf\") pod \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.735016 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.735052 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-confd\") pod \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.736402 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" (UID: "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.736831 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" (UID: "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.737848 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95ce87d2-e5c0-41f4-948a-e78e26077c91" path="/var/lib/kubelet/pods/95ce87d2-e5c0-41f4-948a-e78e26077c91/volumes" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.742784 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" (UID: "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.742636 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-erlang-cookie-secret\") pod \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.743315 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-kube-api-access-vlvjq" (OuterVolumeSpecName: "kube-api-access-vlvjq") pod "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" (UID: "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9"). InnerVolumeSpecName "kube-api-access-vlvjq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.743823 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-config-data\") pod \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.744117 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-tls\") pod \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\" (UID: \"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9\") " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.744520 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54a3c00e-d725-43d6-8afa-1a013c737071-pod-info\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.744641 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54a3c00e-d725-43d6-8afa-1a013c737071-config-data\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.744710 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.744741 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54a3c00e-d725-43d6-8afa-1a013c737071-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.744979 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-pod-info" (OuterVolumeSpecName: "pod-info") pod "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" (UID: "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.744885 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5grxg\" (UniqueName: \"kubernetes.io/projected/54a3c00e-d725-43d6-8afa-1a013c737071-kube-api-access-5grxg\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.751736 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.751861 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54a3c00e-d725-43d6-8afa-1a013c737071-server-conf\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.751889 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.751933 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54a3c00e-d725-43d6-8afa-1a013c737071-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.752031 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.752111 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" (UID: "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.752215 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.752336 4763 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.753436 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54a3c00e-d725-43d6-8afa-1a013c737071-server-conf\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.755777 4763 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.755831 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54a3c00e-d725-43d6-8afa-1a013c737071-config-data\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.756145 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.756390 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.757445 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54a3c00e-d725-43d6-8afa-1a013c737071-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.758082 4763 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.761052 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlvjq\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-kube-api-access-vlvjq\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.762590 4763 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.762675 4763 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-pod-info\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.758148 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.761809 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" (UID: "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.767912 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" (UID: "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.767928 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.780355 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54a3c00e-d725-43d6-8afa-1a013c737071-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.790653 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54a3c00e-d725-43d6-8afa-1a013c737071-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.790773 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5grxg\" (UniqueName: \"kubernetes.io/projected/54a3c00e-d725-43d6-8afa-1a013c737071-kube-api-access-5grxg\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.791255 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54a3c00e-d725-43d6-8afa-1a013c737071-pod-info\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.809185 4763 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-config-data" (OuterVolumeSpecName: "config-data") pod "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" (UID: "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.830520 4763 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.842775 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"54a3c00e-d725-43d6-8afa-1a013c737071\") " pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.860792 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-server-conf" (OuterVolumeSpecName: "server-conf") pod "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" (UID: "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.866713 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.866747 4763 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.866757 4763 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-server-conf\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.866765 4763 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.866774 4763 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.910259 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" (UID: "2d796b9e-4400-4e5f-9ce2-ef40f18f09c9"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.954529 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 06 08:33:49 crc kubenswrapper[4763]: I1206 08:33:49.968588 4763 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.383288 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d796b9e-4400-4e5f-9ce2-ef40f18f09c9","Type":"ContainerDied","Data":"659aa450485048d1f0046170e3e8088d175d1ed6dd020f6b7783a3ca8491038e"} Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.383368 4763 scope.go:117] "RemoveContainer" containerID="92e7ea483e4f925d1daa0f9bfaf84cc0536f9ae19fb35ee4c514d7710912b8a5" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.383474 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.390121 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zdfvp" podUID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" containerName="registry-server" containerID="cri-o://3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8" gracePeriod=2 Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.418110 4763 scope.go:117] "RemoveContainer" containerID="025330a3d2028eb40b7d062410c48bd3146ff2cd92275be82d0b7b00bf80c41c" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.425105 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.432688 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.456279 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 06 08:33:50 crc kubenswrapper[4763]: E1206 08:33:50.456729 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" containerName="setup-container" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.456747 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" containerName="setup-container" Dec 06 08:33:50 crc kubenswrapper[4763]: E1206 08:33:50.456759 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" containerName="rabbitmq" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.456765 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" containerName="rabbitmq" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.456965 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" containerName="rabbitmq" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.464677 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.468242 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.468627 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.468748 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.468870 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-l7674" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.470943 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.471100 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.471239 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.478457 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.524838 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.593939 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.594020 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.594115 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr8p5\" (UniqueName: \"kubernetes.io/projected/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-kube-api-access-tr8p5\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.594169 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.594219 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.594268 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.594336 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.594460 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.594675 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.594735 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.594778 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.696604 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.696646 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.696674 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc 
kubenswrapper[4763]: I1206 08:33:50.696706 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.696735 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.696823 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.696858 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.696984 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.697028 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.697033 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.697063 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.697097 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr8p5\" (UniqueName: \"kubernetes.io/projected/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-kube-api-access-tr8p5\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.698114 4763 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.698779 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.699096 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.699189 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.699411 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.711806 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.712685 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.719346 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr8p5\" (UniqueName: \"kubernetes.io/projected/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-kube-api-access-tr8p5\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.719472 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.719778 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9acf66e1-d7d4-4ffb-afc9-c5b82328d606-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.766776 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9acf66e1-d7d4-4ffb-afc9-c5b82328d606\") " pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.794596 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.801722 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.901343 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-catalog-content\") pod \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.901440 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-utilities\") pod \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.901562 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6dpm\" (UniqueName: \"kubernetes.io/projected/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-kube-api-access-r6dpm\") pod \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\" (UID: \"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8\") " Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.903202 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-utilities" (OuterVolumeSpecName: "utilities") pod "6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" (UID: "6c1c0c0e-8312-407b-a98d-3fc9e16f55f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:33:50 crc kubenswrapper[4763]: I1206 08:33:50.905879 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-kube-api-access-r6dpm" (OuterVolumeSpecName: "kube-api-access-r6dpm") pod "6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" (UID: "6c1c0c0e-8312-407b-a98d-3fc9e16f55f8"). InnerVolumeSpecName "kube-api-access-r6dpm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.003409 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.003885 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6dpm\" (UniqueName: \"kubernetes.io/projected/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-kube-api-access-r6dpm\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.057266 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" (UID: "6c1c0c0e-8312-407b-a98d-3fc9e16f55f8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.105694 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.358833 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 06 08:33:51 crc kubenswrapper[4763]: W1206 08:33:51.360229 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9acf66e1_d7d4_4ffb_afc9_c5b82328d606.slice/crio-1f9ed2e292aa9c3d30256705c2a913c72c850e94d40a5c358444c2d1ebb2d44e WatchSource:0}: Error finding container 1f9ed2e292aa9c3d30256705c2a913c72c850e94d40a5c358444c2d1ebb2d44e: Status 404 returned error can't find the container with id 1f9ed2e292aa9c3d30256705c2a913c72c850e94d40a5c358444c2d1ebb2d44e Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.415950 4763 generic.go:334] "Generic (PLEG): container finished" podID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" containerID="3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8" exitCode=0 Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.416010 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdfvp" event={"ID":"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8","Type":"ContainerDied","Data":"3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8"} Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.416036 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zdfvp" event={"ID":"6c1c0c0e-8312-407b-a98d-3fc9e16f55f8","Type":"ContainerDied","Data":"78a9f4e2817e164ab9aeaccca65dc755853a50c8d4318bb7d4d9c787b4e6fd5d"} Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.416054 4763 scope.go:117] "RemoveContainer" containerID="3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.416219 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zdfvp" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.422521 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"54a3c00e-d725-43d6-8afa-1a013c737071","Type":"ContainerStarted","Data":"199e6f4800328843ba29cebae40746482a0f59e1c689fe1e98b422d3cf1de2de"} Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.424518 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9acf66e1-d7d4-4ffb-afc9-c5b82328d606","Type":"ContainerStarted","Data":"1f9ed2e292aa9c3d30256705c2a913c72c850e94d40a5c358444c2d1ebb2d44e"} Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.724246 4763 scope.go:117] "RemoveContainer" containerID="ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.758138 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d796b9e-4400-4e5f-9ce2-ef40f18f09c9" path="/var/lib/kubelet/pods/2d796b9e-4400-4e5f-9ce2-ef40f18f09c9/volumes" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.776766 4763 scope.go:117] "RemoveContainer" containerID="18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.781867 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zdfvp"] Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.800915 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zdfvp"] Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.825500 4763 scope.go:117] "RemoveContainer" containerID="3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8" Dec 06 08:33:51 crc kubenswrapper[4763]: E1206 08:33:51.826427 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8\": container with ID starting with 3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8 not found: ID does not exist" containerID="3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.826499 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8"} err="failed to get container status \"3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8\": rpc error: code = NotFound desc = could not find container \"3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8\": container with ID starting with 3dfef677381cbf659fc427f8815af8354295d57c4f41063d076c25594ef995e8 not found: ID does not exist" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.826525 4763 scope.go:117] "RemoveContainer" containerID="ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d" Dec 06 08:33:51 crc kubenswrapper[4763]: E1206 08:33:51.827407 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d\": container with ID starting with ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d not found: ID does not exist" containerID="ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d" Dec 06 08:33:51 crc 
kubenswrapper[4763]: I1206 08:33:51.827447 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d"} err="failed to get container status \"ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d\": rpc error: code = NotFound desc = could not find container \"ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d\": container with ID starting with ad866ed12b1b4b05200163a365bef92d62386af3bda48dfdde16be3cc89e106d not found: ID does not exist" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.827474 4763 scope.go:117] "RemoveContainer" containerID="18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973" Dec 06 08:33:51 crc kubenswrapper[4763]: E1206 08:33:51.827706 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973\": container with ID starting with 18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973 not found: ID does not exist" containerID="18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973" Dec 06 08:33:51 crc kubenswrapper[4763]: I1206 08:33:51.827730 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973"} err="failed to get container status \"18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973\": rpc error: code = NotFound desc = could not find container \"18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973\": container with ID starting with 18facf1c17626790fc6528247de4fefb73e761c493c3d2fa796a7253e315b973 not found: ID does not exist" Dec 06 08:33:51 crc kubenswrapper[4763]: E1206 08:33:51.939365 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c1c0c0e_8312_407b_a98d_3fc9e16f55f8.slice\": RecentStats: unable to find data in memory cache]" Dec 06 08:33:52 crc kubenswrapper[4763]: I1206 08:33:52.436595 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"54a3c00e-d725-43d6-8afa-1a013c737071","Type":"ContainerStarted","Data":"8a74dbbcbc15a8686e7b9b095442623807f9168eea3ffece226444c2a72b3667"} Dec 06 08:33:53 crc kubenswrapper[4763]: I1206 08:33:53.449188 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9acf66e1-d7d4-4ffb-afc9-c5b82328d606","Type":"ContainerStarted","Data":"d49b95c8ab6bad08d0c9ad44e0a9b0c9b85d132f204083d70a152bf77fb53cd8"} Dec 06 08:33:53 crc kubenswrapper[4763]: I1206 08:33:53.733431 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" path="/var/lib/kubelet/pods/6c1c0c0e-8312-407b-a98d-3fc9e16f55f8/volumes" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.190144 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7978469d7f-4m686"] Dec 06 08:33:59 crc kubenswrapper[4763]: E1206 08:33:59.191267 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" containerName="extract-utilities" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.191286 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" 
containerName="extract-utilities" Dec 06 08:33:59 crc kubenswrapper[4763]: E1206 08:33:59.191312 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" containerName="extract-content" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.191320 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" containerName="extract-content" Dec 06 08:33:59 crc kubenswrapper[4763]: E1206 08:33:59.191335 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" containerName="registry-server" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.191344 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" containerName="registry-server" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.191606 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c1c0c0e-8312-407b-a98d-3fc9e16f55f8" containerName="registry-server" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.192919 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.195646 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.208965 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7978469d7f-4m686"] Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.276880 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-openstack-edpm-ipam\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.276985 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-swift-storage-0\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.277056 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkvv9\" (UniqueName: \"kubernetes.io/projected/713b2d57-242c-4d84-b4b5-9dcd6861a029-kube-api-access-kkvv9\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.277089 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-sb\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.277118 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-config\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: 
\"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.277141 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-nb\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.277201 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-svc\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.378870 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-svc\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.379224 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-openstack-edpm-ipam\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.379282 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-swift-storage-0\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.379346 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkvv9\" (UniqueName: \"kubernetes.io/projected/713b2d57-242c-4d84-b4b5-9dcd6861a029-kube-api-access-kkvv9\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.379379 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-sb\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.379405 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-config\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.379427 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-nb\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: 
\"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.379735 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-svc\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.380367 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-sb\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.380493 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-nb\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.380500 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-config\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.380812 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-swift-storage-0\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.380863 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-openstack-edpm-ipam\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.400176 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkvv9\" (UniqueName: \"kubernetes.io/projected/713b2d57-242c-4d84-b4b5-9dcd6861a029-kube-api-access-kkvv9\") pod \"dnsmasq-dns-7978469d7f-4m686\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.549562 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:33:59 crc kubenswrapper[4763]: I1206 08:33:59.995820 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7978469d7f-4m686"] Dec 06 08:34:00 crc kubenswrapper[4763]: I1206 08:34:00.545342 4763 generic.go:334] "Generic (PLEG): container finished" podID="713b2d57-242c-4d84-b4b5-9dcd6861a029" containerID="ac9861d86e623d371b30a90c4d4bfa87b0d6ffffd0aff19f4c3288903c619f74" exitCode=0 Dec 06 08:34:00 crc kubenswrapper[4763]: I1206 08:34:00.545381 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7978469d7f-4m686" event={"ID":"713b2d57-242c-4d84-b4b5-9dcd6861a029","Type":"ContainerDied","Data":"ac9861d86e623d371b30a90c4d4bfa87b0d6ffffd0aff19f4c3288903c619f74"} Dec 06 08:34:00 crc kubenswrapper[4763]: I1206 08:34:00.545426 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7978469d7f-4m686" event={"ID":"713b2d57-242c-4d84-b4b5-9dcd6861a029","Type":"ContainerStarted","Data":"be3b1d2c9732e3e2dfa5d9bf3b4731d53fa2a9d7a452ba45732c452e8f350570"} Dec 06 08:34:01 crc kubenswrapper[4763]: I1206 08:34:01.556208 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7978469d7f-4m686" event={"ID":"713b2d57-242c-4d84-b4b5-9dcd6861a029","Type":"ContainerStarted","Data":"5329b5ad7d5b6d685691442e507b6e7b6ed1450233002bdfb04d8bac5d952d24"} Dec 06 08:34:01 crc kubenswrapper[4763]: I1206 08:34:01.556551 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:34:01 crc kubenswrapper[4763]: I1206 08:34:01.572640 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7978469d7f-4m686" podStartSLOduration=2.572619862 podStartE2EDuration="2.572619862s" podCreationTimestamp="2025-12-06 08:33:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:34:01.571920543 +0000 UTC m=+1324.147625611" watchObservedRunningTime="2025-12-06 08:34:01.572619862 +0000 UTC m=+1324.148324900" Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.552221 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.634122 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bd959b5-fbkvk"] Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.634453 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" podUID="c1ea635b-72f7-4abe-8996-76e8905177cf" containerName="dnsmasq-dns" containerID="cri-o://e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac" gracePeriod=10 Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.835383 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-858bd97c49-lcnf9"] Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.838400 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.860647 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-858bd97c49-lcnf9"] Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.925642 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-dns-swift-storage-0\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.925885 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-ovsdbserver-nb\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.926347 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7w2g\" (UniqueName: \"kubernetes.io/projected/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-kube-api-access-q7w2g\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.926381 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-config\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.926403 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-openstack-edpm-ipam\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.926443 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-ovsdbserver-sb\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:09 crc kubenswrapper[4763]: I1206 08:34:09.926642 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-dns-svc\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.031446 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-ovsdbserver-nb\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.031570 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7w2g\" (UniqueName: \"kubernetes.io/projected/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-kube-api-access-q7w2g\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.031592 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-config\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.031608 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-openstack-edpm-ipam\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.031630 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-ovsdbserver-sb\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.031687 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-dns-svc\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.031729 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-dns-swift-storage-0\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.032735 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-dns-swift-storage-0\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.033323 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-ovsdbserver-nb\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.034509 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-dns-svc\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.034837 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-config\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.035912 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-ovsdbserver-sb\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.049789 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-openstack-edpm-ipam\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.060212 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7w2g\" (UniqueName: \"kubernetes.io/projected/1449acdf-6256-4a8c-8cb5-a4a4d1706d26-kube-api-access-q7w2g\") pod \"dnsmasq-dns-858bd97c49-lcnf9\" (UID: \"1449acdf-6256-4a8c-8cb5-a4a4d1706d26\") " pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.160965 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.296582 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.439364 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79zm7\" (UniqueName: \"kubernetes.io/projected/c1ea635b-72f7-4abe-8996-76e8905177cf-kube-api-access-79zm7\") pod \"c1ea635b-72f7-4abe-8996-76e8905177cf\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.439427 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-config\") pod \"c1ea635b-72f7-4abe-8996-76e8905177cf\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.439519 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-swift-storage-0\") pod \"c1ea635b-72f7-4abe-8996-76e8905177cf\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.439555 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-sb\") pod \"c1ea635b-72f7-4abe-8996-76e8905177cf\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.439610 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-nb\") pod \"c1ea635b-72f7-4abe-8996-76e8905177cf\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " Dec 06 
08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.439676 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-svc\") pod \"c1ea635b-72f7-4abe-8996-76e8905177cf\" (UID: \"c1ea635b-72f7-4abe-8996-76e8905177cf\") " Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.446062 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1ea635b-72f7-4abe-8996-76e8905177cf-kube-api-access-79zm7" (OuterVolumeSpecName: "kube-api-access-79zm7") pod "c1ea635b-72f7-4abe-8996-76e8905177cf" (UID: "c1ea635b-72f7-4abe-8996-76e8905177cf"). InnerVolumeSpecName "kube-api-access-79zm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.510204 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c1ea635b-72f7-4abe-8996-76e8905177cf" (UID: "c1ea635b-72f7-4abe-8996-76e8905177cf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.515349 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c1ea635b-72f7-4abe-8996-76e8905177cf" (UID: "c1ea635b-72f7-4abe-8996-76e8905177cf"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.518702 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c1ea635b-72f7-4abe-8996-76e8905177cf" (UID: "c1ea635b-72f7-4abe-8996-76e8905177cf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.520728 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-config" (OuterVolumeSpecName: "config") pod "c1ea635b-72f7-4abe-8996-76e8905177cf" (UID: "c1ea635b-72f7-4abe-8996-76e8905177cf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.535615 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c1ea635b-72f7-4abe-8996-76e8905177cf" (UID: "c1ea635b-72f7-4abe-8996-76e8905177cf"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.542324 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.542359 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79zm7\" (UniqueName: \"kubernetes.io/projected/c1ea635b-72f7-4abe-8996-76e8905177cf-kube-api-access-79zm7\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.542371 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.542387 4763 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.542405 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.542417 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1ea635b-72f7-4abe-8996-76e8905177cf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.642332 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-858bd97c49-lcnf9"] Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.712611 4763 generic.go:334] "Generic (PLEG): container finished" podID="c1ea635b-72f7-4abe-8996-76e8905177cf" containerID="e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac" exitCode=0 Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.712688 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" event={"ID":"c1ea635b-72f7-4abe-8996-76e8905177cf","Type":"ContainerDied","Data":"e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac"} Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.712720 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" event={"ID":"c1ea635b-72f7-4abe-8996-76e8905177cf","Type":"ContainerDied","Data":"8254b1427d03f98dbfe03fc46efd9fd5b9198a3fc6a31a7cc96764df48d3f1c4"} Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.712739 4763 scope.go:117] "RemoveContainer" containerID="e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.712940 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.719389 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" event={"ID":"1449acdf-6256-4a8c-8cb5-a4a4d1706d26","Type":"ContainerStarted","Data":"257788986a4537f4c1862fa305bfa410b84549dfcc48e6d172c7050bacf9f6e1"} Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.743992 4763 scope.go:117] "RemoveContainer" containerID="1360d2af19135ca400ca0675005b88f100843c408a4e679c8b9da5d04e233260" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.753145 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bd959b5-fbkvk"] Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.761265 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84bd959b5-fbkvk"] Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.784487 4763 scope.go:117] "RemoveContainer" containerID="e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac" Dec 06 08:34:10 crc kubenswrapper[4763]: E1206 08:34:10.785409 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac\": container with ID starting with e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac not found: ID does not exist" containerID="e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.785460 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac"} err="failed to get container status \"e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac\": rpc error: code = NotFound desc = could not find container \"e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac\": container with ID starting with e5a11c4dc10fdb55a4da9bcac876ed3cbe199d46f22657b2673f0442f1e54eac not found: ID does not exist" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.785496 4763 scope.go:117] "RemoveContainer" containerID="1360d2af19135ca400ca0675005b88f100843c408a4e679c8b9da5d04e233260" Dec 06 08:34:10 crc kubenswrapper[4763]: E1206 08:34:10.786171 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1360d2af19135ca400ca0675005b88f100843c408a4e679c8b9da5d04e233260\": container with ID starting with 1360d2af19135ca400ca0675005b88f100843c408a4e679c8b9da5d04e233260 not found: ID does not exist" containerID="1360d2af19135ca400ca0675005b88f100843c408a4e679c8b9da5d04e233260" Dec 06 08:34:10 crc kubenswrapper[4763]: I1206 08:34:10.786214 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1360d2af19135ca400ca0675005b88f100843c408a4e679c8b9da5d04e233260"} err="failed to get container status \"1360d2af19135ca400ca0675005b88f100843c408a4e679c8b9da5d04e233260\": rpc error: code = NotFound desc = could not find container \"1360d2af19135ca400ca0675005b88f100843c408a4e679c8b9da5d04e233260\": container with ID starting with 1360d2af19135ca400ca0675005b88f100843c408a4e679c8b9da5d04e233260 not found: ID does not exist" Dec 06 08:34:11 crc kubenswrapper[4763]: I1206 08:34:11.746283 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1ea635b-72f7-4abe-8996-76e8905177cf" 
path="/var/lib/kubelet/pods/c1ea635b-72f7-4abe-8996-76e8905177cf/volumes" Dec 06 08:34:11 crc kubenswrapper[4763]: I1206 08:34:11.749729 4763 generic.go:334] "Generic (PLEG): container finished" podID="1449acdf-6256-4a8c-8cb5-a4a4d1706d26" containerID="75a82545ab2c4945321bfb1526162667d442f31059eb13eedbb27f62afbc9b85" exitCode=0 Dec 06 08:34:11 crc kubenswrapper[4763]: I1206 08:34:11.749791 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" event={"ID":"1449acdf-6256-4a8c-8cb5-a4a4d1706d26","Type":"ContainerDied","Data":"75a82545ab2c4945321bfb1526162667d442f31059eb13eedbb27f62afbc9b85"} Dec 06 08:34:12 crc kubenswrapper[4763]: I1206 08:34:12.536779 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:34:12 crc kubenswrapper[4763]: I1206 08:34:12.537580 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:34:12 crc kubenswrapper[4763]: I1206 08:34:12.766033 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" event={"ID":"1449acdf-6256-4a8c-8cb5-a4a4d1706d26","Type":"ContainerStarted","Data":"d197139467706069dcc82a3cf4ac708db9482738edb46dfc3fad7e0b8142ecaa"} Dec 06 08:34:12 crc kubenswrapper[4763]: I1206 08:34:12.766252 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:12 crc kubenswrapper[4763]: I1206 08:34:12.790364 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" podStartSLOduration=3.790344114 podStartE2EDuration="3.790344114s" podCreationTimestamp="2025-12-06 08:34:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:34:12.789215913 +0000 UTC m=+1335.364920951" watchObservedRunningTime="2025-12-06 08:34:12.790344114 +0000 UTC m=+1335.366049162" Dec 06 08:34:15 crc kubenswrapper[4763]: I1206 08:34:15.098787 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-84bd959b5-fbkvk" podUID="c1ea635b-72f7-4abe-8996-76e8905177cf" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.217:5353: i/o timeout" Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.162114 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-858bd97c49-lcnf9" Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.240002 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7978469d7f-4m686"] Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.240376 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7978469d7f-4m686" podUID="713b2d57-242c-4d84-b4b5-9dcd6861a029" containerName="dnsmasq-dns" containerID="cri-o://5329b5ad7d5b6d685691442e507b6e7b6ed1450233002bdfb04d8bac5d952d24" gracePeriod=10 Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.834886 4763 generic.go:334] 
"Generic (PLEG): container finished" podID="713b2d57-242c-4d84-b4b5-9dcd6861a029" containerID="5329b5ad7d5b6d685691442e507b6e7b6ed1450233002bdfb04d8bac5d952d24" exitCode=0 Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.835072 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7978469d7f-4m686" event={"ID":"713b2d57-242c-4d84-b4b5-9dcd6861a029","Type":"ContainerDied","Data":"5329b5ad7d5b6d685691442e507b6e7b6ed1450233002bdfb04d8bac5d952d24"} Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.835185 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7978469d7f-4m686" event={"ID":"713b2d57-242c-4d84-b4b5-9dcd6861a029","Type":"ContainerDied","Data":"be3b1d2c9732e3e2dfa5d9bf3b4731d53fa2a9d7a452ba45732c452e8f350570"} Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.835201 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be3b1d2c9732e3e2dfa5d9bf3b4731d53fa2a9d7a452ba45732c452e8f350570" Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.873541 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.957680 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-swift-storage-0\") pod \"713b2d57-242c-4d84-b4b5-9dcd6861a029\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.957806 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-sb\") pod \"713b2d57-242c-4d84-b4b5-9dcd6861a029\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.957857 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-svc\") pod \"713b2d57-242c-4d84-b4b5-9dcd6861a029\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.957882 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-openstack-edpm-ipam\") pod \"713b2d57-242c-4d84-b4b5-9dcd6861a029\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.957917 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-nb\") pod \"713b2d57-242c-4d84-b4b5-9dcd6861a029\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.958085 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-config\") pod \"713b2d57-242c-4d84-b4b5-9dcd6861a029\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.958155 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkvv9\" (UniqueName: 
\"kubernetes.io/projected/713b2d57-242c-4d84-b4b5-9dcd6861a029-kube-api-access-kkvv9\") pod \"713b2d57-242c-4d84-b4b5-9dcd6861a029\" (UID: \"713b2d57-242c-4d84-b4b5-9dcd6861a029\") " Dec 06 08:34:20 crc kubenswrapper[4763]: I1206 08:34:20.963584 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/713b2d57-242c-4d84-b4b5-9dcd6861a029-kube-api-access-kkvv9" (OuterVolumeSpecName: "kube-api-access-kkvv9") pod "713b2d57-242c-4d84-b4b5-9dcd6861a029" (UID: "713b2d57-242c-4d84-b4b5-9dcd6861a029"). InnerVolumeSpecName "kube-api-access-kkvv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.035053 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "713b2d57-242c-4d84-b4b5-9dcd6861a029" (UID: "713b2d57-242c-4d84-b4b5-9dcd6861a029"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.043395 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "713b2d57-242c-4d84-b4b5-9dcd6861a029" (UID: "713b2d57-242c-4d84-b4b5-9dcd6861a029"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.044400 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-config" (OuterVolumeSpecName: "config") pod "713b2d57-242c-4d84-b4b5-9dcd6861a029" (UID: "713b2d57-242c-4d84-b4b5-9dcd6861a029"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.051843 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "713b2d57-242c-4d84-b4b5-9dcd6861a029" (UID: "713b2d57-242c-4d84-b4b5-9dcd6861a029"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.054566 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "713b2d57-242c-4d84-b4b5-9dcd6861a029" (UID: "713b2d57-242c-4d84-b4b5-9dcd6861a029"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.060307 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.060504 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkvv9\" (UniqueName: \"kubernetes.io/projected/713b2d57-242c-4d84-b4b5-9dcd6861a029-kube-api-access-kkvv9\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.060588 4763 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.060669 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.060723 4763 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.060772 4763 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.063988 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "713b2d57-242c-4d84-b4b5-9dcd6861a029" (UID: "713b2d57-242c-4d84-b4b5-9dcd6861a029"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.162493 4763 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/713b2d57-242c-4d84-b4b5-9dcd6861a029-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.844050 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7978469d7f-4m686" Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.864882 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7978469d7f-4m686"] Dec 06 08:34:21 crc kubenswrapper[4763]: I1206 08:34:21.873269 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7978469d7f-4m686"] Dec 06 08:34:23 crc kubenswrapper[4763]: I1206 08:34:23.734082 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="713b2d57-242c-4d84-b4b5-9dcd6861a029" path="/var/lib/kubelet/pods/713b2d57-242c-4d84-b4b5-9dcd6861a029/volumes" Dec 06 08:34:24 crc kubenswrapper[4763]: I1206 08:34:24.877635 4763 generic.go:334] "Generic (PLEG): container finished" podID="54a3c00e-d725-43d6-8afa-1a013c737071" containerID="8a74dbbcbc15a8686e7b9b095442623807f9168eea3ffece226444c2a72b3667" exitCode=0 Dec 06 08:34:24 crc kubenswrapper[4763]: I1206 08:34:24.877729 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"54a3c00e-d725-43d6-8afa-1a013c737071","Type":"ContainerDied","Data":"8a74dbbcbc15a8686e7b9b095442623807f9168eea3ffece226444c2a72b3667"} Dec 06 08:34:25 crc kubenswrapper[4763]: I1206 08:34:25.889109 4763 generic.go:334] "Generic (PLEG): container finished" podID="9acf66e1-d7d4-4ffb-afc9-c5b82328d606" containerID="d49b95c8ab6bad08d0c9ad44e0a9b0c9b85d132f204083d70a152bf77fb53cd8" exitCode=0 Dec 06 08:34:25 crc kubenswrapper[4763]: I1206 08:34:25.889191 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9acf66e1-d7d4-4ffb-afc9-c5b82328d606","Type":"ContainerDied","Data":"d49b95c8ab6bad08d0c9ad44e0a9b0c9b85d132f204083d70a152bf77fb53cd8"} Dec 06 08:34:25 crc kubenswrapper[4763]: I1206 08:34:25.893109 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"54a3c00e-d725-43d6-8afa-1a013c737071","Type":"ContainerStarted","Data":"9b83c77778bf712927773a5a9db5d2fd1b0eca1055e27a734073dbd92672e08f"} Dec 06 08:34:25 crc kubenswrapper[4763]: I1206 08:34:25.893334 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 06 08:34:25 crc kubenswrapper[4763]: I1206 08:34:25.981495 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.981471603 podStartE2EDuration="36.981471603s" podCreationTimestamp="2025-12-06 08:33:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:34:25.946738156 +0000 UTC m=+1348.522443194" watchObservedRunningTime="2025-12-06 08:34:25.981471603 +0000 UTC m=+1348.557176641" Dec 06 08:34:26 crc kubenswrapper[4763]: I1206 08:34:26.906420 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9acf66e1-d7d4-4ffb-afc9-c5b82328d606","Type":"ContainerStarted","Data":"ebca87131d50d7cdd3c527dfc260a0060115d5b1650e23e1b79dc1c4a6ff1fc6"} Dec 06 08:34:26 crc kubenswrapper[4763]: I1206 08:34:26.907125 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:34:26 crc kubenswrapper[4763]: I1206 08:34:26.950082 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.950050978 podStartE2EDuration="36.950050978s" 
podCreationTimestamp="2025-12-06 08:33:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:34:26.93311371 +0000 UTC m=+1349.508818758" watchObservedRunningTime="2025-12-06 08:34:26.950050978 +0000 UTC m=+1349.525756016" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.858396 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2"] Dec 06 08:34:38 crc kubenswrapper[4763]: E1206 08:34:38.859860 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="713b2d57-242c-4d84-b4b5-9dcd6861a029" containerName="dnsmasq-dns" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.859982 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="713b2d57-242c-4d84-b4b5-9dcd6861a029" containerName="dnsmasq-dns" Dec 06 08:34:38 crc kubenswrapper[4763]: E1206 08:34:38.860006 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ea635b-72f7-4abe-8996-76e8905177cf" containerName="init" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.860014 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ea635b-72f7-4abe-8996-76e8905177cf" containerName="init" Dec 06 08:34:38 crc kubenswrapper[4763]: E1206 08:34:38.860118 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ea635b-72f7-4abe-8996-76e8905177cf" containerName="dnsmasq-dns" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.860132 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ea635b-72f7-4abe-8996-76e8905177cf" containerName="dnsmasq-dns" Dec 06 08:34:38 crc kubenswrapper[4763]: E1206 08:34:38.860179 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="713b2d57-242c-4d84-b4b5-9dcd6861a029" containerName="init" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.860187 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="713b2d57-242c-4d84-b4b5-9dcd6861a029" containerName="init" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.860767 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="713b2d57-242c-4d84-b4b5-9dcd6861a029" containerName="dnsmasq-dns" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.860796 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1ea635b-72f7-4abe-8996-76e8905177cf" containerName="dnsmasq-dns" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.864147 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.866565 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.868329 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.868580 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.869023 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:34:38 crc kubenswrapper[4763]: I1206 08:34:38.903349 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2"] Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.016253 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.016940 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.017199 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fndgf\" (UniqueName: \"kubernetes.io/projected/61414443-2847-4a81-8bbb-af167c4ff3c6-kube-api-access-fndgf\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.017344 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.120268 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fndgf\" (UniqueName: \"kubernetes.io/projected/61414443-2847-4a81-8bbb-af167c4ff3c6-kube-api-access-fndgf\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.120350 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-inventory\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.120391 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.120554 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.130602 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.137722 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.137992 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.139259 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fndgf\" (UniqueName: \"kubernetes.io/projected/61414443-2847-4a81-8bbb-af167c4ff3c6-kube-api-access-fndgf\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-496t2\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.202202 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.802660 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2"] Dec 06 08:34:39 crc kubenswrapper[4763]: I1206 08:34:39.956033 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="54a3c00e-d725-43d6-8afa-1a013c737071" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.225:5671: connect: connection refused" Dec 06 08:34:40 crc kubenswrapper[4763]: I1206 08:34:40.049823 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" event={"ID":"61414443-2847-4a81-8bbb-af167c4ff3c6","Type":"ContainerStarted","Data":"2138093b515558f9529ddd420f2ff2b15448b45d225cee875d669a017d8fbebe"} Dec 06 08:34:40 crc kubenswrapper[4763]: I1206 08:34:40.804324 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="9acf66e1-d7d4-4ffb-afc9-c5b82328d606" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.226:5671: connect: connection refused" Dec 06 08:34:42 crc kubenswrapper[4763]: I1206 08:34:42.536944 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:34:42 crc kubenswrapper[4763]: I1206 08:34:42.537476 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:34:42 crc kubenswrapper[4763]: I1206 08:34:42.537524 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:34:42 crc kubenswrapper[4763]: I1206 08:34:42.538357 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"12049d13410239289c3450e1ce76dfa60781d6b25fb180e7241cdfee5b8c3dbd"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 08:34:42 crc kubenswrapper[4763]: I1206 08:34:42.538404 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://12049d13410239289c3450e1ce76dfa60781d6b25fb180e7241cdfee5b8c3dbd" gracePeriod=600 Dec 06 08:34:43 crc kubenswrapper[4763]: I1206 08:34:43.081179 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="12049d13410239289c3450e1ce76dfa60781d6b25fb180e7241cdfee5b8c3dbd" exitCode=0 Dec 06 08:34:43 crc kubenswrapper[4763]: I1206 08:34:43.081219 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" 
event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"12049d13410239289c3450e1ce76dfa60781d6b25fb180e7241cdfee5b8c3dbd"} Dec 06 08:34:43 crc kubenswrapper[4763]: I1206 08:34:43.081255 4763 scope.go:117] "RemoveContainer" containerID="c874920460a590f92765b487dcad196b365be507c27bb07cd2fdb6e943ba11c4" Dec 06 08:34:44 crc kubenswrapper[4763]: I1206 08:34:44.095359 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206"} Dec 06 08:34:49 crc kubenswrapper[4763]: I1206 08:34:49.956047 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 06 08:34:50 crc kubenswrapper[4763]: I1206 08:34:50.803097 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 06 08:34:51 crc kubenswrapper[4763]: I1206 08:34:51.225825 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" event={"ID":"61414443-2847-4a81-8bbb-af167c4ff3c6","Type":"ContainerStarted","Data":"4117cdf8e54eddfe1ced26b2ed6f431b5511f031c883209a91e7a369f7288995"} Dec 06 08:34:51 crc kubenswrapper[4763]: I1206 08:34:51.247846 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" podStartSLOduration=2.606003549 podStartE2EDuration="13.24782721s" podCreationTimestamp="2025-12-06 08:34:38 +0000 UTC" firstStartedPulling="2025-12-06 08:34:39.807299624 +0000 UTC m=+1362.383004662" lastFinishedPulling="2025-12-06 08:34:50.449123285 +0000 UTC m=+1373.024828323" observedRunningTime="2025-12-06 08:34:51.241041787 +0000 UTC m=+1373.816746825" watchObservedRunningTime="2025-12-06 08:34:51.24782721 +0000 UTC m=+1373.823532238" Dec 06 08:35:07 crc kubenswrapper[4763]: I1206 08:35:07.404311 4763 generic.go:334] "Generic (PLEG): container finished" podID="61414443-2847-4a81-8bbb-af167c4ff3c6" containerID="4117cdf8e54eddfe1ced26b2ed6f431b5511f031c883209a91e7a369f7288995" exitCode=0 Dec 06 08:35:07 crc kubenswrapper[4763]: I1206 08:35:07.404403 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" event={"ID":"61414443-2847-4a81-8bbb-af167c4ff3c6","Type":"ContainerDied","Data":"4117cdf8e54eddfe1ced26b2ed6f431b5511f031c883209a91e7a369f7288995"} Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.015270 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.068065 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-ssh-key\") pod \"61414443-2847-4a81-8bbb-af167c4ff3c6\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.068256 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fndgf\" (UniqueName: \"kubernetes.io/projected/61414443-2847-4a81-8bbb-af167c4ff3c6-kube-api-access-fndgf\") pod \"61414443-2847-4a81-8bbb-af167c4ff3c6\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.068634 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-inventory\") pod \"61414443-2847-4a81-8bbb-af167c4ff3c6\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.068783 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-repo-setup-combined-ca-bundle\") pod \"61414443-2847-4a81-8bbb-af167c4ff3c6\" (UID: \"61414443-2847-4a81-8bbb-af167c4ff3c6\") " Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.088262 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61414443-2847-4a81-8bbb-af167c4ff3c6-kube-api-access-fndgf" (OuterVolumeSpecName: "kube-api-access-fndgf") pod "61414443-2847-4a81-8bbb-af167c4ff3c6" (UID: "61414443-2847-4a81-8bbb-af167c4ff3c6"). InnerVolumeSpecName "kube-api-access-fndgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.093242 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "61414443-2847-4a81-8bbb-af167c4ff3c6" (UID: "61414443-2847-4a81-8bbb-af167c4ff3c6"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.117359 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-inventory" (OuterVolumeSpecName: "inventory") pod "61414443-2847-4a81-8bbb-af167c4ff3c6" (UID: "61414443-2847-4a81-8bbb-af167c4ff3c6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.117808 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61414443-2847-4a81-8bbb-af167c4ff3c6" (UID: "61414443-2847-4a81-8bbb-af167c4ff3c6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.172168 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.172199 4763 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.172216 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61414443-2847-4a81-8bbb-af167c4ff3c6-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.172231 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fndgf\" (UniqueName: \"kubernetes.io/projected/61414443-2847-4a81-8bbb-af167c4ff3c6-kube-api-access-fndgf\") on node \"crc\" DevicePath \"\"" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.425248 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" event={"ID":"61414443-2847-4a81-8bbb-af167c4ff3c6","Type":"ContainerDied","Data":"2138093b515558f9529ddd420f2ff2b15448b45d225cee875d669a017d8fbebe"} Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.425301 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2138093b515558f9529ddd420f2ff2b15448b45d225cee875d669a017d8fbebe" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.425381 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-496t2" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.522878 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q"] Dec 06 08:35:09 crc kubenswrapper[4763]: E1206 08:35:09.523366 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61414443-2847-4a81-8bbb-af167c4ff3c6" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.523388 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="61414443-2847-4a81-8bbb-af167c4ff3c6" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.523606 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="61414443-2847-4a81-8bbb-af167c4ff3c6" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.524404 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.530709 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.530875 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.531054 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.531400 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q"] Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.532605 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.581005 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gdj8q\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.581212 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdsq5\" (UniqueName: \"kubernetes.io/projected/5e76d5c3-ab36-42b8-ab61-39e14274b162-kube-api-access-fdsq5\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gdj8q\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.581270 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gdj8q\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.683064 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdsq5\" (UniqueName: \"kubernetes.io/projected/5e76d5c3-ab36-42b8-ab61-39e14274b162-kube-api-access-fdsq5\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gdj8q\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.683157 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gdj8q\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.683243 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gdj8q\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.687939 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gdj8q\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.688589 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gdj8q\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.703650 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdsq5\" (UniqueName: \"kubernetes.io/projected/5e76d5c3-ab36-42b8-ab61-39e14274b162-kube-api-access-fdsq5\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gdj8q\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:09 crc kubenswrapper[4763]: I1206 08:35:09.891750 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:10 crc kubenswrapper[4763]: I1206 08:35:10.381607 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q"] Dec 06 08:35:10 crc kubenswrapper[4763]: I1206 08:35:10.436343 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" event={"ID":"5e76d5c3-ab36-42b8-ab61-39e14274b162","Type":"ContainerStarted","Data":"305e0abb4983e899e069280e3261f0d146305fbc6606901a0522ad1e6bb3e9b1"} Dec 06 08:35:16 crc kubenswrapper[4763]: I1206 08:35:16.504232 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" event={"ID":"5e76d5c3-ab36-42b8-ab61-39e14274b162","Type":"ContainerStarted","Data":"d009ccdb15bdfd00e32d6306e064b7e5d30342b82edd4be7b35cb2c3cb82ca78"} Dec 06 08:35:16 crc kubenswrapper[4763]: I1206 08:35:16.537760 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" podStartSLOduration=2.996983741 podStartE2EDuration="7.537736656s" podCreationTimestamp="2025-12-06 08:35:09 +0000 UTC" firstStartedPulling="2025-12-06 08:35:10.386254721 +0000 UTC m=+1392.961959759" lastFinishedPulling="2025-12-06 08:35:14.927007636 +0000 UTC m=+1397.502712674" observedRunningTime="2025-12-06 08:35:16.527695434 +0000 UTC m=+1399.103400472" watchObservedRunningTime="2025-12-06 08:35:16.537736656 +0000 UTC m=+1399.113441714" Dec 06 08:35:18 crc kubenswrapper[4763]: I1206 08:35:18.528982 4763 generic.go:334] "Generic (PLEG): container finished" podID="5e76d5c3-ab36-42b8-ab61-39e14274b162" containerID="d009ccdb15bdfd00e32d6306e064b7e5d30342b82edd4be7b35cb2c3cb82ca78" exitCode=0 Dec 06 08:35:18 crc kubenswrapper[4763]: I1206 08:35:18.529074 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" 
event={"ID":"5e76d5c3-ab36-42b8-ab61-39e14274b162","Type":"ContainerDied","Data":"d009ccdb15bdfd00e32d6306e064b7e5d30342b82edd4be7b35cb2c3cb82ca78"} Dec 06 08:35:19 crc kubenswrapper[4763]: I1206 08:35:19.981327 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.008203 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdsq5\" (UniqueName: \"kubernetes.io/projected/5e76d5c3-ab36-42b8-ab61-39e14274b162-kube-api-access-fdsq5\") pod \"5e76d5c3-ab36-42b8-ab61-39e14274b162\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.008417 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-ssh-key\") pod \"5e76d5c3-ab36-42b8-ab61-39e14274b162\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.008476 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-inventory\") pod \"5e76d5c3-ab36-42b8-ab61-39e14274b162\" (UID: \"5e76d5c3-ab36-42b8-ab61-39e14274b162\") " Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.016754 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e76d5c3-ab36-42b8-ab61-39e14274b162-kube-api-access-fdsq5" (OuterVolumeSpecName: "kube-api-access-fdsq5") pod "5e76d5c3-ab36-42b8-ab61-39e14274b162" (UID: "5e76d5c3-ab36-42b8-ab61-39e14274b162"). InnerVolumeSpecName "kube-api-access-fdsq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.043118 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-inventory" (OuterVolumeSpecName: "inventory") pod "5e76d5c3-ab36-42b8-ab61-39e14274b162" (UID: "5e76d5c3-ab36-42b8-ab61-39e14274b162"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.054018 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5e76d5c3-ab36-42b8-ab61-39e14274b162" (UID: "5e76d5c3-ab36-42b8-ab61-39e14274b162"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.112379 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdsq5\" (UniqueName: \"kubernetes.io/projected/5e76d5c3-ab36-42b8-ab61-39e14274b162-kube-api-access-fdsq5\") on node \"crc\" DevicePath \"\"" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.112427 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.112440 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e76d5c3-ab36-42b8-ab61-39e14274b162-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.560189 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" event={"ID":"5e76d5c3-ab36-42b8-ab61-39e14274b162","Type":"ContainerDied","Data":"305e0abb4983e899e069280e3261f0d146305fbc6606901a0522ad1e6bb3e9b1"} Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.560261 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="305e0abb4983e899e069280e3261f0d146305fbc6606901a0522ad1e6bb3e9b1" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.560364 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gdj8q" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.627460 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr"] Dec 06 08:35:20 crc kubenswrapper[4763]: E1206 08:35:20.628266 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e76d5c3-ab36-42b8-ab61-39e14274b162" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.628343 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e76d5c3-ab36-42b8-ab61-39e14274b162" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.628666 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e76d5c3-ab36-42b8-ab61-39e14274b162" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.629635 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.639221 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr"] Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.664380 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.664668 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.664871 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.666525 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.727312 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm7sr\" (UniqueName: \"kubernetes.io/projected/b775bb6f-096f-4232-b395-664dce5d049b-kube-api-access-qm7sr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.727373 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.727661 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.727887 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.774278 4763 scope.go:117] "RemoveContainer" containerID="8bf783d1994ffac8d83cc1c4f7d3f9c286a83009090459e4f3b05e1b0c096f2f" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.829267 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.830444 4763 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.830796 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm7sr\" (UniqueName: \"kubernetes.io/projected/b775bb6f-096f-4232-b395-664dce5d049b-kube-api-access-qm7sr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.830844 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.835141 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.835173 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.835863 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.852649 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm7sr\" (UniqueName: \"kubernetes.io/projected/b775bb6f-096f-4232-b395-664dce5d049b-kube-api-access-qm7sr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:20 crc kubenswrapper[4763]: I1206 08:35:20.991489 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:35:21 crc kubenswrapper[4763]: I1206 08:35:21.632392 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr"] Dec 06 08:35:22 crc kubenswrapper[4763]: I1206 08:35:22.585299 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" event={"ID":"b775bb6f-096f-4232-b395-664dce5d049b","Type":"ContainerStarted","Data":"22309fb858a74ddae95bcd6db589599eda89d85d0cb12638a8756b7a20d166bc"} Dec 06 08:35:24 crc kubenswrapper[4763]: I1206 08:35:24.610725 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" event={"ID":"b775bb6f-096f-4232-b395-664dce5d049b","Type":"ContainerStarted","Data":"b6de7f459ca40a7bc2885569c69091c4be64cb2f03f281a161095f58d464b70d"} Dec 06 08:35:24 crc kubenswrapper[4763]: I1206 08:35:24.635036 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" podStartSLOduration=3.652350943 podStartE2EDuration="4.635012953s" podCreationTimestamp="2025-12-06 08:35:20 +0000 UTC" firstStartedPulling="2025-12-06 08:35:21.637151926 +0000 UTC m=+1404.212856964" lastFinishedPulling="2025-12-06 08:35:22.619813936 +0000 UTC m=+1405.195518974" observedRunningTime="2025-12-06 08:35:24.627390676 +0000 UTC m=+1407.203095744" watchObservedRunningTime="2025-12-06 08:35:24.635012953 +0000 UTC m=+1407.210717991" Dec 06 08:36:20 crc kubenswrapper[4763]: I1206 08:36:20.962404 4763 scope.go:117] "RemoveContainer" containerID="d6d464e842d72aa92c3569828d3c6d1425b5073132344f84405a41f2616e8cd8" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.157145 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xvv4v"] Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.159736 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.171818 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvv4v"] Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.325865 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4qnh\" (UniqueName: \"kubernetes.io/projected/470d0e13-f9f9-4cc3-a817-d75b72bd680c-kube-api-access-q4qnh\") pod \"redhat-marketplace-xvv4v\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.326040 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-utilities\") pod \"redhat-marketplace-xvv4v\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.326130 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-catalog-content\") pod \"redhat-marketplace-xvv4v\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.427649 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-utilities\") pod \"redhat-marketplace-xvv4v\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.427771 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-catalog-content\") pod \"redhat-marketplace-xvv4v\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.427814 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4qnh\" (UniqueName: \"kubernetes.io/projected/470d0e13-f9f9-4cc3-a817-d75b72bd680c-kube-api-access-q4qnh\") pod \"redhat-marketplace-xvv4v\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.428356 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-utilities\") pod \"redhat-marketplace-xvv4v\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.428369 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-catalog-content\") pod \"redhat-marketplace-xvv4v\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.448775 4763 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-q4qnh\" (UniqueName: \"kubernetes.io/projected/470d0e13-f9f9-4cc3-a817-d75b72bd680c-kube-api-access-q4qnh\") pod \"redhat-marketplace-xvv4v\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.491494 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:36:55 crc kubenswrapper[4763]: I1206 08:36:55.972462 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvv4v"] Dec 06 08:36:56 crc kubenswrapper[4763]: I1206 08:36:56.624227 4763 generic.go:334] "Generic (PLEG): container finished" podID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" containerID="f22cf17b0882ee16d4af4e5ca18c4b70a0af6d9671e56b7d58b193e59d66e407" exitCode=0 Dec 06 08:36:56 crc kubenswrapper[4763]: I1206 08:36:56.624502 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvv4v" event={"ID":"470d0e13-f9f9-4cc3-a817-d75b72bd680c","Type":"ContainerDied","Data":"f22cf17b0882ee16d4af4e5ca18c4b70a0af6d9671e56b7d58b193e59d66e407"} Dec 06 08:36:56 crc kubenswrapper[4763]: I1206 08:36:56.624528 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvv4v" event={"ID":"470d0e13-f9f9-4cc3-a817-d75b72bd680c","Type":"ContainerStarted","Data":"ae4ff986d8b1c0b821755c74fd9a2e7b2493ad8760b42bcc1e7c95d6d53d3724"} Dec 06 08:36:56 crc kubenswrapper[4763]: I1206 08:36:56.626924 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 08:36:57 crc kubenswrapper[4763]: I1206 08:36:57.635416 4763 generic.go:334] "Generic (PLEG): container finished" podID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" containerID="476e0dbbe597ff8ae9660524d95a8ee91ade57e5ea9f5e6fcb5cbcb5f12591a8" exitCode=0 Dec 06 08:36:57 crc kubenswrapper[4763]: I1206 08:36:57.635462 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvv4v" event={"ID":"470d0e13-f9f9-4cc3-a817-d75b72bd680c","Type":"ContainerDied","Data":"476e0dbbe597ff8ae9660524d95a8ee91ade57e5ea9f5e6fcb5cbcb5f12591a8"} Dec 06 08:36:58 crc kubenswrapper[4763]: I1206 08:36:58.648320 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvv4v" event={"ID":"470d0e13-f9f9-4cc3-a817-d75b72bd680c","Type":"ContainerStarted","Data":"6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8"} Dec 06 08:36:58 crc kubenswrapper[4763]: I1206 08:36:58.677635 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xvv4v" podStartSLOduration=2.160048602 podStartE2EDuration="3.677617395s" podCreationTimestamp="2025-12-06 08:36:55 +0000 UTC" firstStartedPulling="2025-12-06 08:36:56.626629959 +0000 UTC m=+1499.202334997" lastFinishedPulling="2025-12-06 08:36:58.144198752 +0000 UTC m=+1500.719903790" observedRunningTime="2025-12-06 08:36:58.666387441 +0000 UTC m=+1501.242092499" watchObservedRunningTime="2025-12-06 08:36:58.677617395 +0000 UTC m=+1501.253322433" Dec 06 08:37:05 crc kubenswrapper[4763]: I1206 08:37:05.491783 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:37:05 crc kubenswrapper[4763]: I1206 08:37:05.492305 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:37:05 crc kubenswrapper[4763]: I1206 08:37:05.545021 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:37:05 crc kubenswrapper[4763]: I1206 08:37:05.773660 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:37:05 crc kubenswrapper[4763]: I1206 08:37:05.828750 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvv4v"] Dec 06 08:37:07 crc kubenswrapper[4763]: I1206 08:37:07.740086 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xvv4v" podUID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" containerName="registry-server" containerID="cri-o://6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8" gracePeriod=2 Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.700751 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.751780 4763 generic.go:334] "Generic (PLEG): container finished" podID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" containerID="6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8" exitCode=0 Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.751827 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvv4v" event={"ID":"470d0e13-f9f9-4cc3-a817-d75b72bd680c","Type":"ContainerDied","Data":"6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8"} Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.751858 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xvv4v" event={"ID":"470d0e13-f9f9-4cc3-a817-d75b72bd680c","Type":"ContainerDied","Data":"ae4ff986d8b1c0b821755c74fd9a2e7b2493ad8760b42bcc1e7c95d6d53d3724"} Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.751855 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xvv4v" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.751919 4763 scope.go:117] "RemoveContainer" containerID="6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.779221 4763 scope.go:117] "RemoveContainer" containerID="476e0dbbe597ff8ae9660524d95a8ee91ade57e5ea9f5e6fcb5cbcb5f12591a8" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.798713 4763 scope.go:117] "RemoveContainer" containerID="f22cf17b0882ee16d4af4e5ca18c4b70a0af6d9671e56b7d58b193e59d66e407" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.820849 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-catalog-content\") pod \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.821105 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4qnh\" (UniqueName: \"kubernetes.io/projected/470d0e13-f9f9-4cc3-a817-d75b72bd680c-kube-api-access-q4qnh\") pod \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.821215 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-utilities\") pod \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\" (UID: \"470d0e13-f9f9-4cc3-a817-d75b72bd680c\") " Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.823631 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-utilities" (OuterVolumeSpecName: "utilities") pod "470d0e13-f9f9-4cc3-a817-d75b72bd680c" (UID: "470d0e13-f9f9-4cc3-a817-d75b72bd680c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.827621 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/470d0e13-f9f9-4cc3-a817-d75b72bd680c-kube-api-access-q4qnh" (OuterVolumeSpecName: "kube-api-access-q4qnh") pod "470d0e13-f9f9-4cc3-a817-d75b72bd680c" (UID: "470d0e13-f9f9-4cc3-a817-d75b72bd680c"). InnerVolumeSpecName "kube-api-access-q4qnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.841287 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "470d0e13-f9f9-4cc3-a817-d75b72bd680c" (UID: "470d0e13-f9f9-4cc3-a817-d75b72bd680c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.910476 4763 scope.go:117] "RemoveContainer" containerID="6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8" Dec 06 08:37:08 crc kubenswrapper[4763]: E1206 08:37:08.911127 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8\": container with ID starting with 6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8 not found: ID does not exist" containerID="6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.911164 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8"} err="failed to get container status \"6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8\": rpc error: code = NotFound desc = could not find container \"6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8\": container with ID starting with 6804c2f2d3a4d85e4ee61eac8a68941747666f9349c834fe9f923e4687d1b9e8 not found: ID does not exist" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.911186 4763 scope.go:117] "RemoveContainer" containerID="476e0dbbe597ff8ae9660524d95a8ee91ade57e5ea9f5e6fcb5cbcb5f12591a8" Dec 06 08:37:08 crc kubenswrapper[4763]: E1206 08:37:08.911467 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"476e0dbbe597ff8ae9660524d95a8ee91ade57e5ea9f5e6fcb5cbcb5f12591a8\": container with ID starting with 476e0dbbe597ff8ae9660524d95a8ee91ade57e5ea9f5e6fcb5cbcb5f12591a8 not found: ID does not exist" containerID="476e0dbbe597ff8ae9660524d95a8ee91ade57e5ea9f5e6fcb5cbcb5f12591a8" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.911503 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"476e0dbbe597ff8ae9660524d95a8ee91ade57e5ea9f5e6fcb5cbcb5f12591a8"} err="failed to get container status \"476e0dbbe597ff8ae9660524d95a8ee91ade57e5ea9f5e6fcb5cbcb5f12591a8\": rpc error: code = NotFound desc = could not find container \"476e0dbbe597ff8ae9660524d95a8ee91ade57e5ea9f5e6fcb5cbcb5f12591a8\": container with ID starting with 476e0dbbe597ff8ae9660524d95a8ee91ade57e5ea9f5e6fcb5cbcb5f12591a8 not found: ID does not exist" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.911521 4763 scope.go:117] "RemoveContainer" containerID="f22cf17b0882ee16d4af4e5ca18c4b70a0af6d9671e56b7d58b193e59d66e407" Dec 06 08:37:08 crc kubenswrapper[4763]: E1206 08:37:08.911852 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f22cf17b0882ee16d4af4e5ca18c4b70a0af6d9671e56b7d58b193e59d66e407\": container with ID starting with f22cf17b0882ee16d4af4e5ca18c4b70a0af6d9671e56b7d58b193e59d66e407 not found: ID does not exist" containerID="f22cf17b0882ee16d4af4e5ca18c4b70a0af6d9671e56b7d58b193e59d66e407" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.911879 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f22cf17b0882ee16d4af4e5ca18c4b70a0af6d9671e56b7d58b193e59d66e407"} err="failed to get container status \"f22cf17b0882ee16d4af4e5ca18c4b70a0af6d9671e56b7d58b193e59d66e407\": rpc error: code = NotFound desc = could not 
find container \"f22cf17b0882ee16d4af4e5ca18c4b70a0af6d9671e56b7d58b193e59d66e407\": container with ID starting with f22cf17b0882ee16d4af4e5ca18c4b70a0af6d9671e56b7d58b193e59d66e407 not found: ID does not exist" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.925400 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.925436 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/470d0e13-f9f9-4cc3-a817-d75b72bd680c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:37:08 crc kubenswrapper[4763]: I1206 08:37:08.925458 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4qnh\" (UniqueName: \"kubernetes.io/projected/470d0e13-f9f9-4cc3-a817-d75b72bd680c-kube-api-access-q4qnh\") on node \"crc\" DevicePath \"\"" Dec 06 08:37:09 crc kubenswrapper[4763]: I1206 08:37:09.086450 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvv4v"] Dec 06 08:37:09 crc kubenswrapper[4763]: I1206 08:37:09.096605 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xvv4v"] Dec 06 08:37:09 crc kubenswrapper[4763]: I1206 08:37:09.729808 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" path="/var/lib/kubelet/pods/470d0e13-f9f9-4cc3-a817-d75b72bd680c/volumes" Dec 06 08:37:12 crc kubenswrapper[4763]: I1206 08:37:12.536521 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:37:12 crc kubenswrapper[4763]: I1206 08:37:12.536875 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:37:21 crc kubenswrapper[4763]: I1206 08:37:21.050060 4763 scope.go:117] "RemoveContainer" containerID="99a1bf8d4c3b079d0129a67d319ec3ec91c5fbd5dd9f993498a7de7a07a0d1a4" Dec 06 08:37:21 crc kubenswrapper[4763]: I1206 08:37:21.093145 4763 scope.go:117] "RemoveContainer" containerID="a97a1f31ab498dadd08a5629a0af61ee03a0c09180ed36925556155d9231e079" Dec 06 08:37:21 crc kubenswrapper[4763]: I1206 08:37:21.122539 4763 scope.go:117] "RemoveContainer" containerID="5fecf6466e0030860e5da8a01d400422b61c12f85723aef7d1d2bdf27836a5bd" Dec 06 08:37:21 crc kubenswrapper[4763]: I1206 08:37:21.142809 4763 scope.go:117] "RemoveContainer" containerID="69c8117d016f3c5b76ec6f7f715d7dbef50443bd150be54a5b1d87e801eaf5ec" Dec 06 08:37:21 crc kubenswrapper[4763]: I1206 08:37:21.161547 4763 scope.go:117] "RemoveContainer" containerID="45165ad41c822d9525863f060df2a2eba8ae327f7248300d8f6bcc59b3801870" Dec 06 08:37:21 crc kubenswrapper[4763]: I1206 08:37:21.181176 4763 scope.go:117] "RemoveContainer" containerID="f0be8ae559e38706bd319899ca235d4a3618e25ea8505ebbce3d31fdcc0c3537" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.582199 4763 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/community-operators-b5v2h"] Dec 06 08:37:25 crc kubenswrapper[4763]: E1206 08:37:25.584132 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" containerName="registry-server" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.584164 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" containerName="registry-server" Dec 06 08:37:25 crc kubenswrapper[4763]: E1206 08:37:25.584211 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" containerName="extract-content" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.584220 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" containerName="extract-content" Dec 06 08:37:25 crc kubenswrapper[4763]: E1206 08:37:25.584237 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" containerName="extract-utilities" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.584247 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" containerName="extract-utilities" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.584559 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="470d0e13-f9f9-4cc3-a817-d75b72bd680c" containerName="registry-server" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.587111 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.592282 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b5v2h"] Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.598271 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-utilities\") pod \"community-operators-b5v2h\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.598346 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-catalog-content\") pod \"community-operators-b5v2h\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.598394 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5r4d\" (UniqueName: \"kubernetes.io/projected/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-kube-api-access-c5r4d\") pod \"community-operators-b5v2h\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.700846 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-utilities\") pod \"community-operators-b5v2h\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.700938 4763 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-catalog-content\") pod \"community-operators-b5v2h\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.700995 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5r4d\" (UniqueName: \"kubernetes.io/projected/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-kube-api-access-c5r4d\") pod \"community-operators-b5v2h\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.701483 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-utilities\") pod \"community-operators-b5v2h\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.701812 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-catalog-content\") pod \"community-operators-b5v2h\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.728369 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5r4d\" (UniqueName: \"kubernetes.io/projected/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-kube-api-access-c5r4d\") pod \"community-operators-b5v2h\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:25 crc kubenswrapper[4763]: I1206 08:37:25.917334 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:26 crc kubenswrapper[4763]: I1206 08:37:26.427534 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b5v2h"] Dec 06 08:37:26 crc kubenswrapper[4763]: I1206 08:37:26.947543 4763 generic.go:334] "Generic (PLEG): container finished" podID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" containerID="e2abcfbcb249b1fc0779c266aaf7440f0aef6b90ed901ae0a9c5a6132fc27f4a" exitCode=0 Dec 06 08:37:26 crc kubenswrapper[4763]: I1206 08:37:26.947579 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5v2h" event={"ID":"3b6d5374-999e-4f00-acfb-9cac2f4b6dae","Type":"ContainerDied","Data":"e2abcfbcb249b1fc0779c266aaf7440f0aef6b90ed901ae0a9c5a6132fc27f4a"} Dec 06 08:37:26 crc kubenswrapper[4763]: I1206 08:37:26.947837 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5v2h" event={"ID":"3b6d5374-999e-4f00-acfb-9cac2f4b6dae","Type":"ContainerStarted","Data":"399076fdaf26eb86ec640c0ca259d885d9c27bb14a60072a97e6848c6126324c"} Dec 06 08:37:27 crc kubenswrapper[4763]: I1206 08:37:27.958412 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5v2h" event={"ID":"3b6d5374-999e-4f00-acfb-9cac2f4b6dae","Type":"ContainerStarted","Data":"02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb"} Dec 06 08:37:28 crc kubenswrapper[4763]: I1206 08:37:28.971823 4763 generic.go:334] "Generic (PLEG): container finished" podID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" containerID="02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb" exitCode=0 Dec 06 08:37:28 crc kubenswrapper[4763]: I1206 08:37:28.971930 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5v2h" event={"ID":"3b6d5374-999e-4f00-acfb-9cac2f4b6dae","Type":"ContainerDied","Data":"02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb"} Dec 06 08:37:29 crc kubenswrapper[4763]: I1206 08:37:29.985944 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5v2h" event={"ID":"3b6d5374-999e-4f00-acfb-9cac2f4b6dae","Type":"ContainerStarted","Data":"bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966"} Dec 06 08:37:30 crc kubenswrapper[4763]: I1206 08:37:30.021214 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b5v2h" podStartSLOduration=2.5022126719999997 podStartE2EDuration="5.021193641s" podCreationTimestamp="2025-12-06 08:37:25 +0000 UTC" firstStartedPulling="2025-12-06 08:37:26.949328223 +0000 UTC m=+1529.525033281" lastFinishedPulling="2025-12-06 08:37:29.468309212 +0000 UTC m=+1532.044014250" observedRunningTime="2025-12-06 08:37:30.01227736 +0000 UTC m=+1532.587982388" watchObservedRunningTime="2025-12-06 08:37:30.021193641 +0000 UTC m=+1532.596898689" Dec 06 08:37:35 crc kubenswrapper[4763]: I1206 08:37:35.917626 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:35 crc kubenswrapper[4763]: I1206 08:37:35.918225 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:35 crc kubenswrapper[4763]: I1206 08:37:35.963248 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:36 crc kubenswrapper[4763]: I1206 08:37:36.084625 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:36 crc kubenswrapper[4763]: I1206 08:37:36.215463 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b5v2h"] Dec 06 08:37:38 crc kubenswrapper[4763]: I1206 08:37:38.059465 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b5v2h" podUID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" containerName="registry-server" containerID="cri-o://bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966" gracePeriod=2 Dec 06 08:37:38 crc kubenswrapper[4763]: I1206 08:37:38.546062 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:38 crc kubenswrapper[4763]: I1206 08:37:38.647335 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5r4d\" (UniqueName: \"kubernetes.io/projected/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-kube-api-access-c5r4d\") pod \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " Dec 06 08:37:38 crc kubenswrapper[4763]: I1206 08:37:38.647417 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-utilities\") pod \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " Dec 06 08:37:38 crc kubenswrapper[4763]: I1206 08:37:38.647544 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-catalog-content\") pod \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\" (UID: \"3b6d5374-999e-4f00-acfb-9cac2f4b6dae\") " Dec 06 08:37:38 crc kubenswrapper[4763]: I1206 08:37:38.648595 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-utilities" (OuterVolumeSpecName: "utilities") pod "3b6d5374-999e-4f00-acfb-9cac2f4b6dae" (UID: "3b6d5374-999e-4f00-acfb-9cac2f4b6dae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:37:38 crc kubenswrapper[4763]: I1206 08:37:38.656232 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-kube-api-access-c5r4d" (OuterVolumeSpecName: "kube-api-access-c5r4d") pod "3b6d5374-999e-4f00-acfb-9cac2f4b6dae" (UID: "3b6d5374-999e-4f00-acfb-9cac2f4b6dae"). InnerVolumeSpecName "kube-api-access-c5r4d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:37:38 crc kubenswrapper[4763]: I1206 08:37:38.749659 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5r4d\" (UniqueName: \"kubernetes.io/projected/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-kube-api-access-c5r4d\") on node \"crc\" DevicePath \"\"" Dec 06 08:37:38 crc kubenswrapper[4763]: I1206 08:37:38.749689 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:37:38 crc kubenswrapper[4763]: I1206 08:37:38.844952 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3b6d5374-999e-4f00-acfb-9cac2f4b6dae" (UID: "3b6d5374-999e-4f00-acfb-9cac2f4b6dae"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:37:38 crc kubenswrapper[4763]: I1206 08:37:38.851347 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b6d5374-999e-4f00-acfb-9cac2f4b6dae-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.089711 4763 generic.go:334] "Generic (PLEG): container finished" podID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" containerID="bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966" exitCode=0 Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.089759 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5v2h" event={"ID":"3b6d5374-999e-4f00-acfb-9cac2f4b6dae","Type":"ContainerDied","Data":"bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966"} Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.089788 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5v2h" event={"ID":"3b6d5374-999e-4f00-acfb-9cac2f4b6dae","Type":"ContainerDied","Data":"399076fdaf26eb86ec640c0ca259d885d9c27bb14a60072a97e6848c6126324c"} Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.089804 4763 scope.go:117] "RemoveContainer" containerID="bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966" Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.090643 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b5v2h" Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.110547 4763 scope.go:117] "RemoveContainer" containerID="02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb" Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.134070 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b5v2h"] Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.141171 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b5v2h"] Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.148730 4763 scope.go:117] "RemoveContainer" containerID="e2abcfbcb249b1fc0779c266aaf7440f0aef6b90ed901ae0a9c5a6132fc27f4a" Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.178340 4763 scope.go:117] "RemoveContainer" containerID="bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966" Dec 06 08:37:39 crc kubenswrapper[4763]: E1206 08:37:39.178766 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966\": container with ID starting with bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966 not found: ID does not exist" containerID="bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966" Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.178797 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966"} err="failed to get container status \"bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966\": rpc error: code = NotFound desc = could not find container \"bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966\": container with ID starting with bf6a20be97d54f17424c256785d303fc52d86d2409c4612d6ebfd433922fd966 not found: ID does not exist" Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.178819 4763 scope.go:117] "RemoveContainer" containerID="02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb" Dec 06 08:37:39 crc kubenswrapper[4763]: E1206 08:37:39.179182 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb\": container with ID starting with 02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb not found: ID does not exist" containerID="02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb" Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.179203 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb"} err="failed to get container status \"02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb\": rpc error: code = NotFound desc = could not find container \"02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb\": container with ID starting with 02f682b8f24545644a2e8c2d73b520827ddc39864fc7823a7fab7e65ec2ac4bb not found: ID does not exist" Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.179215 4763 scope.go:117] "RemoveContainer" containerID="e2abcfbcb249b1fc0779c266aaf7440f0aef6b90ed901ae0a9c5a6132fc27f4a" Dec 06 08:37:39 crc kubenswrapper[4763]: E1206 08:37:39.179400 4763 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e2abcfbcb249b1fc0779c266aaf7440f0aef6b90ed901ae0a9c5a6132fc27f4a\": container with ID starting with e2abcfbcb249b1fc0779c266aaf7440f0aef6b90ed901ae0a9c5a6132fc27f4a not found: ID does not exist" containerID="e2abcfbcb249b1fc0779c266aaf7440f0aef6b90ed901ae0a9c5a6132fc27f4a" Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.179422 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2abcfbcb249b1fc0779c266aaf7440f0aef6b90ed901ae0a9c5a6132fc27f4a"} err="failed to get container status \"e2abcfbcb249b1fc0779c266aaf7440f0aef6b90ed901ae0a9c5a6132fc27f4a\": rpc error: code = NotFound desc = could not find container \"e2abcfbcb249b1fc0779c266aaf7440f0aef6b90ed901ae0a9c5a6132fc27f4a\": container with ID starting with e2abcfbcb249b1fc0779c266aaf7440f0aef6b90ed901ae0a9c5a6132fc27f4a not found: ID does not exist" Dec 06 08:37:39 crc kubenswrapper[4763]: I1206 08:37:39.730814 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" path="/var/lib/kubelet/pods/3b6d5374-999e-4f00-acfb-9cac2f4b6dae/volumes" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.625914 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-plzx5"] Dec 06 08:37:41 crc kubenswrapper[4763]: E1206 08:37:41.626329 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" containerName="extract-content" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.626344 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" containerName="extract-content" Dec 06 08:37:41 crc kubenswrapper[4763]: E1206 08:37:41.626370 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" containerName="extract-utilities" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.626378 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" containerName="extract-utilities" Dec 06 08:37:41 crc kubenswrapper[4763]: E1206 08:37:41.626391 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" containerName="registry-server" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.626402 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" containerName="registry-server" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.626625 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b6d5374-999e-4f00-acfb-9cac2f4b6dae" containerName="registry-server" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.628614 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.658401 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-plzx5"] Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.704141 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-utilities\") pod \"certified-operators-plzx5\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.704324 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-catalog-content\") pod \"certified-operators-plzx5\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.704476 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzpdw\" (UniqueName: \"kubernetes.io/projected/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-kube-api-access-rzpdw\") pod \"certified-operators-plzx5\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.806598 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzpdw\" (UniqueName: \"kubernetes.io/projected/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-kube-api-access-rzpdw\") pod \"certified-operators-plzx5\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.806814 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-utilities\") pod \"certified-operators-plzx5\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.806924 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-catalog-content\") pod \"certified-operators-plzx5\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.807582 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-utilities\") pod \"certified-operators-plzx5\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.807631 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-catalog-content\") pod \"certified-operators-plzx5\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.825401 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rzpdw\" (UniqueName: \"kubernetes.io/projected/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-kube-api-access-rzpdw\") pod \"certified-operators-plzx5\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:41 crc kubenswrapper[4763]: I1206 08:37:41.963441 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:42 crc kubenswrapper[4763]: I1206 08:37:42.537387 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:37:42 crc kubenswrapper[4763]: I1206 08:37:42.537754 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:37:42 crc kubenswrapper[4763]: I1206 08:37:42.621097 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-plzx5"] Dec 06 08:37:42 crc kubenswrapper[4763]: W1206 08:37:42.636425 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f9b9894_1fa4_493e_8e43_5ed50e8ae1d7.slice/crio-9ba1d05a113f6af5d14e883dd8a3d3d05d0dd08dfd706cccc749cee7564c297f WatchSource:0}: Error finding container 9ba1d05a113f6af5d14e883dd8a3d3d05d0dd08dfd706cccc749cee7564c297f: Status 404 returned error can't find the container with id 9ba1d05a113f6af5d14e883dd8a3d3d05d0dd08dfd706cccc749cee7564c297f Dec 06 08:37:43 crc kubenswrapper[4763]: I1206 08:37:43.132923 4763 generic.go:334] "Generic (PLEG): container finished" podID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" containerID="2bf76a86f0757d27c752eb238d60be409c7b2e4707399063bbb59c3281b799e8" exitCode=0 Dec 06 08:37:43 crc kubenswrapper[4763]: I1206 08:37:43.132970 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plzx5" event={"ID":"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7","Type":"ContainerDied","Data":"2bf76a86f0757d27c752eb238d60be409c7b2e4707399063bbb59c3281b799e8"} Dec 06 08:37:43 crc kubenswrapper[4763]: I1206 08:37:43.132996 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plzx5" event={"ID":"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7","Type":"ContainerStarted","Data":"9ba1d05a113f6af5d14e883dd8a3d3d05d0dd08dfd706cccc749cee7564c297f"} Dec 06 08:37:45 crc kubenswrapper[4763]: I1206 08:37:45.158929 4763 generic.go:334] "Generic (PLEG): container finished" podID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" containerID="28b804f06bf3aa668311250edac2b9a1ee25a7cf48e4b643a47837d8bfecefa5" exitCode=0 Dec 06 08:37:45 crc kubenswrapper[4763]: I1206 08:37:45.158987 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plzx5" event={"ID":"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7","Type":"ContainerDied","Data":"28b804f06bf3aa668311250edac2b9a1ee25a7cf48e4b643a47837d8bfecefa5"} Dec 06 08:37:46 crc kubenswrapper[4763]: I1206 08:37:46.171831 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-plzx5" event={"ID":"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7","Type":"ContainerStarted","Data":"92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237"} Dec 06 08:37:46 crc kubenswrapper[4763]: I1206 08:37:46.193475 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-plzx5" podStartSLOduration=2.574914113 podStartE2EDuration="5.193456774s" podCreationTimestamp="2025-12-06 08:37:41 +0000 UTC" firstStartedPulling="2025-12-06 08:37:43.134929765 +0000 UTC m=+1545.710634803" lastFinishedPulling="2025-12-06 08:37:45.753472426 +0000 UTC m=+1548.329177464" observedRunningTime="2025-12-06 08:37:46.188813307 +0000 UTC m=+1548.764518355" watchObservedRunningTime="2025-12-06 08:37:46.193456774 +0000 UTC m=+1548.769161812" Dec 06 08:37:51 crc kubenswrapper[4763]: I1206 08:37:51.964426 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:52 crc kubenswrapper[4763]: I1206 08:37:52.006281 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:52 crc kubenswrapper[4763]: I1206 08:37:52.066153 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:52 crc kubenswrapper[4763]: I1206 08:37:52.278782 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:52 crc kubenswrapper[4763]: I1206 08:37:52.331553 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-plzx5"] Dec 06 08:37:54 crc kubenswrapper[4763]: I1206 08:37:54.245114 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-plzx5" podUID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" containerName="registry-server" containerID="cri-o://92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237" gracePeriod=2 Dec 06 08:37:54 crc kubenswrapper[4763]: I1206 08:37:54.719528 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:54 crc kubenswrapper[4763]: I1206 08:37:54.772372 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-utilities\") pod \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " Dec 06 08:37:54 crc kubenswrapper[4763]: I1206 08:37:54.772511 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzpdw\" (UniqueName: \"kubernetes.io/projected/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-kube-api-access-rzpdw\") pod \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " Dec 06 08:37:54 crc kubenswrapper[4763]: I1206 08:37:54.772699 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-catalog-content\") pod \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\" (UID: \"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7\") " Dec 06 08:37:54 crc kubenswrapper[4763]: I1206 08:37:54.773732 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-utilities" (OuterVolumeSpecName: "utilities") pod "7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" (UID: "7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:37:54 crc kubenswrapper[4763]: I1206 08:37:54.774298 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:37:54 crc kubenswrapper[4763]: I1206 08:37:54.779259 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-kube-api-access-rzpdw" (OuterVolumeSpecName: "kube-api-access-rzpdw") pod "7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" (UID: "7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7"). InnerVolumeSpecName "kube-api-access-rzpdw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:37:54 crc kubenswrapper[4763]: I1206 08:37:54.836978 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" (UID: "7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:37:54 crc kubenswrapper[4763]: I1206 08:37:54.876553 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzpdw\" (UniqueName: \"kubernetes.io/projected/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-kube-api-access-rzpdw\") on node \"crc\" DevicePath \"\"" Dec 06 08:37:54 crc kubenswrapper[4763]: I1206 08:37:54.876585 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.255211 4763 generic.go:334] "Generic (PLEG): container finished" podID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" containerID="92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237" exitCode=0 Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.255256 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-plzx5" Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.255280 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plzx5" event={"ID":"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7","Type":"ContainerDied","Data":"92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237"} Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.256864 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-plzx5" event={"ID":"7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7","Type":"ContainerDied","Data":"9ba1d05a113f6af5d14e883dd8a3d3d05d0dd08dfd706cccc749cee7564c297f"} Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.256885 4763 scope.go:117] "RemoveContainer" containerID="92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237" Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.284230 4763 scope.go:117] "RemoveContainer" containerID="28b804f06bf3aa668311250edac2b9a1ee25a7cf48e4b643a47837d8bfecefa5" Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.290598 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-plzx5"] Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.300026 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-plzx5"] Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.318402 4763 scope.go:117] "RemoveContainer" containerID="2bf76a86f0757d27c752eb238d60be409c7b2e4707399063bbb59c3281b799e8" Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.365130 4763 scope.go:117] "RemoveContainer" containerID="92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237" Dec 06 08:37:55 crc kubenswrapper[4763]: E1206 08:37:55.365633 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237\": container with ID starting with 92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237 not found: ID does not exist" containerID="92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237" Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.365682 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237"} err="failed to get container status 
\"92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237\": rpc error: code = NotFound desc = could not find container \"92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237\": container with ID starting with 92022ed3be4dfb9e4343eb6942533d8111766e98f79600f6adc181d04b8bb237 not found: ID does not exist" Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.365714 4763 scope.go:117] "RemoveContainer" containerID="28b804f06bf3aa668311250edac2b9a1ee25a7cf48e4b643a47837d8bfecefa5" Dec 06 08:37:55 crc kubenswrapper[4763]: E1206 08:37:55.366142 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28b804f06bf3aa668311250edac2b9a1ee25a7cf48e4b643a47837d8bfecefa5\": container with ID starting with 28b804f06bf3aa668311250edac2b9a1ee25a7cf48e4b643a47837d8bfecefa5 not found: ID does not exist" containerID="28b804f06bf3aa668311250edac2b9a1ee25a7cf48e4b643a47837d8bfecefa5" Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.366165 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28b804f06bf3aa668311250edac2b9a1ee25a7cf48e4b643a47837d8bfecefa5"} err="failed to get container status \"28b804f06bf3aa668311250edac2b9a1ee25a7cf48e4b643a47837d8bfecefa5\": rpc error: code = NotFound desc = could not find container \"28b804f06bf3aa668311250edac2b9a1ee25a7cf48e4b643a47837d8bfecefa5\": container with ID starting with 28b804f06bf3aa668311250edac2b9a1ee25a7cf48e4b643a47837d8bfecefa5 not found: ID does not exist" Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.366190 4763 scope.go:117] "RemoveContainer" containerID="2bf76a86f0757d27c752eb238d60be409c7b2e4707399063bbb59c3281b799e8" Dec 06 08:37:55 crc kubenswrapper[4763]: E1206 08:37:55.366478 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bf76a86f0757d27c752eb238d60be409c7b2e4707399063bbb59c3281b799e8\": container with ID starting with 2bf76a86f0757d27c752eb238d60be409c7b2e4707399063bbb59c3281b799e8 not found: ID does not exist" containerID="2bf76a86f0757d27c752eb238d60be409c7b2e4707399063bbb59c3281b799e8" Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.366531 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bf76a86f0757d27c752eb238d60be409c7b2e4707399063bbb59c3281b799e8"} err="failed to get container status \"2bf76a86f0757d27c752eb238d60be409c7b2e4707399063bbb59c3281b799e8\": rpc error: code = NotFound desc = could not find container \"2bf76a86f0757d27c752eb238d60be409c7b2e4707399063bbb59c3281b799e8\": container with ID starting with 2bf76a86f0757d27c752eb238d60be409c7b2e4707399063bbb59c3281b799e8 not found: ID does not exist" Dec 06 08:37:55 crc kubenswrapper[4763]: I1206 08:37:55.751659 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" path="/var/lib/kubelet/pods/7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7/volumes" Dec 06 08:38:12 crc kubenswrapper[4763]: I1206 08:38:12.536774 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:38:12 crc kubenswrapper[4763]: I1206 08:38:12.537450 4763 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:38:12 crc kubenswrapper[4763]: I1206 08:38:12.537502 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:38:12 crc kubenswrapper[4763]: I1206 08:38:12.538378 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 08:38:12 crc kubenswrapper[4763]: I1206 08:38:12.538442 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" gracePeriod=600 Dec 06 08:38:12 crc kubenswrapper[4763]: E1206 08:38:12.664474 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:38:13 crc kubenswrapper[4763]: I1206 08:38:13.436405 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" exitCode=0 Dec 06 08:38:13 crc kubenswrapper[4763]: I1206 08:38:13.436473 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206"} Dec 06 08:38:13 crc kubenswrapper[4763]: I1206 08:38:13.437084 4763 scope.go:117] "RemoveContainer" containerID="12049d13410239289c3450e1ce76dfa60781d6b25fb180e7241cdfee5b8c3dbd" Dec 06 08:38:13 crc kubenswrapper[4763]: I1206 08:38:13.438315 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:38:13 crc kubenswrapper[4763]: E1206 08:38:13.438769 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:38:21 crc kubenswrapper[4763]: I1206 08:38:21.273347 4763 scope.go:117] "RemoveContainer" containerID="b5689e6009818d0fbc3318c082b970bbdb6c0131b8b49c26f72354c54b59b2d1" Dec 06 08:38:21 crc kubenswrapper[4763]: I1206 08:38:21.295015 4763 scope.go:117] "RemoveContainer" 
containerID="3b5d636734408a792ee7c67ff4b9636f2127f9e650577e576161174b7cff6c31" Dec 06 08:38:21 crc kubenswrapper[4763]: I1206 08:38:21.316121 4763 scope.go:117] "RemoveContainer" containerID="5942b76ccdc6d26f7c14bb1e6f052fe51d80f7094f8e26e26d2ab08868706bf2" Dec 06 08:38:21 crc kubenswrapper[4763]: I1206 08:38:21.346729 4763 scope.go:117] "RemoveContainer" containerID="f0712d5b17eff9cfa0b28eee4e4d3b4fde40c63ae551fa97facc50762002bb36" Dec 06 08:38:21 crc kubenswrapper[4763]: I1206 08:38:21.390507 4763 scope.go:117] "RemoveContainer" containerID="bfe5dbae931004786a2a8148052c457a45ec6acc21921d30966615d35de3a65b" Dec 06 08:38:21 crc kubenswrapper[4763]: I1206 08:38:21.410808 4763 scope.go:117] "RemoveContainer" containerID="79ba783d9368d66361908c0ff5a6fd9aeaf1d4865b368239aecfd36fdc7d13ce" Dec 06 08:38:21 crc kubenswrapper[4763]: I1206 08:38:21.433955 4763 scope.go:117] "RemoveContainer" containerID="c6ee4a5644273508d844ddb9513f23b938dc9de3adccd204f19a2deeade5ec7e" Dec 06 08:38:27 crc kubenswrapper[4763]: I1206 08:38:27.726445 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:38:27 crc kubenswrapper[4763]: E1206 08:38:27.727180 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:38:39 crc kubenswrapper[4763]: I1206 08:38:39.720352 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:38:39 crc kubenswrapper[4763]: E1206 08:38:39.721606 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:38:47 crc kubenswrapper[4763]: I1206 08:38:47.046166 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-40e8-account-create-update-hpdth"] Dec 06 08:38:47 crc kubenswrapper[4763]: I1206 08:38:47.057397 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-thw8d"] Dec 06 08:38:47 crc kubenswrapper[4763]: I1206 08:38:47.068227 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-lkfrr"] Dec 06 08:38:47 crc kubenswrapper[4763]: I1206 08:38:47.077584 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-5c8f-account-create-update-cr74m"] Dec 06 08:38:47 crc kubenswrapper[4763]: I1206 08:38:47.086542 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-40e8-account-create-update-hpdth"] Dec 06 08:38:47 crc kubenswrapper[4763]: I1206 08:38:47.097969 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-5c8f-account-create-update-cr74m"] Dec 06 08:38:47 crc kubenswrapper[4763]: I1206 08:38:47.106546 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-lkfrr"] Dec 06 08:38:47 crc kubenswrapper[4763]: 
I1206 08:38:47.114633 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-thw8d"] Dec 06 08:38:47 crc kubenswrapper[4763]: I1206 08:38:47.733356 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1447b4d5-1e23-43a9-9877-7e5ed71f3c72" path="/var/lib/kubelet/pods/1447b4d5-1e23-43a9-9877-7e5ed71f3c72/volumes" Dec 06 08:38:47 crc kubenswrapper[4763]: I1206 08:38:47.734490 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35fd0671-e6bc-429c-a2e5-2f7757c7cda4" path="/var/lib/kubelet/pods/35fd0671-e6bc-429c-a2e5-2f7757c7cda4/volumes" Dec 06 08:38:47 crc kubenswrapper[4763]: I1206 08:38:47.735110 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7728e4ef-8ef4-414c-9cd9-274b386d59bb" path="/var/lib/kubelet/pods/7728e4ef-8ef4-414c-9cd9-274b386d59bb/volumes" Dec 06 08:38:47 crc kubenswrapper[4763]: I1206 08:38:47.735628 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb37a078-cd5e-4ca9-aac8-068f790e3a5a" path="/var/lib/kubelet/pods/cb37a078-cd5e-4ca9-aac8-068f790e3a5a/volumes" Dec 06 08:38:49 crc kubenswrapper[4763]: I1206 08:38:49.027560 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-create-xqr8s"] Dec 06 08:38:49 crc kubenswrapper[4763]: I1206 08:38:49.036153 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-8ebe-account-create-update-b574x"] Dec 06 08:38:49 crc kubenswrapper[4763]: I1206 08:38:49.045163 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-create-xqr8s"] Dec 06 08:38:49 crc kubenswrapper[4763]: I1206 08:38:49.053318 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-8ebe-account-create-update-b574x"] Dec 06 08:38:49 crc kubenswrapper[4763]: I1206 08:38:49.737992 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92a312a1-7032-4fa4-b14a-0874d22ac4ee" path="/var/lib/kubelet/pods/92a312a1-7032-4fa4-b14a-0874d22ac4ee/volumes" Dec 06 08:38:49 crc kubenswrapper[4763]: I1206 08:38:49.739645 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2355f72-6850-4c5e-ae74-d0525a8bd9e3" path="/var/lib/kubelet/pods/e2355f72-6850-4c5e-ae74-d0525a8bd9e3/volumes" Dec 06 08:38:49 crc kubenswrapper[4763]: I1206 08:38:49.796061 4763 generic.go:334] "Generic (PLEG): container finished" podID="b775bb6f-096f-4232-b395-664dce5d049b" containerID="b6de7f459ca40a7bc2885569c69091c4be64cb2f03f281a161095f58d464b70d" exitCode=0 Dec 06 08:38:49 crc kubenswrapper[4763]: I1206 08:38:49.796111 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" event={"ID":"b775bb6f-096f-4232-b395-664dce5d049b","Type":"ContainerDied","Data":"b6de7f459ca40a7bc2885569c69091c4be64cb2f03f281a161095f58d464b70d"} Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.213672 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.295215 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-inventory\") pod \"b775bb6f-096f-4232-b395-664dce5d049b\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.295288 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm7sr\" (UniqueName: \"kubernetes.io/projected/b775bb6f-096f-4232-b395-664dce5d049b-kube-api-access-qm7sr\") pod \"b775bb6f-096f-4232-b395-664dce5d049b\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.295417 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-ssh-key\") pod \"b775bb6f-096f-4232-b395-664dce5d049b\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.295466 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-bootstrap-combined-ca-bundle\") pod \"b775bb6f-096f-4232-b395-664dce5d049b\" (UID: \"b775bb6f-096f-4232-b395-664dce5d049b\") " Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.301480 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "b775bb6f-096f-4232-b395-664dce5d049b" (UID: "b775bb6f-096f-4232-b395-664dce5d049b"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.301494 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b775bb6f-096f-4232-b395-664dce5d049b-kube-api-access-qm7sr" (OuterVolumeSpecName: "kube-api-access-qm7sr") pod "b775bb6f-096f-4232-b395-664dce5d049b" (UID: "b775bb6f-096f-4232-b395-664dce5d049b"). InnerVolumeSpecName "kube-api-access-qm7sr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.327268 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-inventory" (OuterVolumeSpecName: "inventory") pod "b775bb6f-096f-4232-b395-664dce5d049b" (UID: "b775bb6f-096f-4232-b395-664dce5d049b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.331578 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b775bb6f-096f-4232-b395-664dce5d049b" (UID: "b775bb6f-096f-4232-b395-664dce5d049b"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.397365 4763 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.397399 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.397408 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm7sr\" (UniqueName: \"kubernetes.io/projected/b775bb6f-096f-4232-b395-664dce5d049b-kube-api-access-qm7sr\") on node \"crc\" DevicePath \"\"" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.397417 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b775bb6f-096f-4232-b395-664dce5d049b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.814049 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" event={"ID":"b775bb6f-096f-4232-b395-664dce5d049b","Type":"ContainerDied","Data":"22309fb858a74ddae95bcd6db589599eda89d85d0cb12638a8756b7a20d166bc"} Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.814337 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22309fb858a74ddae95bcd6db589599eda89d85d0cb12638a8756b7a20d166bc" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.814102 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.907953 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487"] Dec 06 08:38:51 crc kubenswrapper[4763]: E1206 08:38:51.908587 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b775bb6f-096f-4232-b395-664dce5d049b" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.908606 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="b775bb6f-096f-4232-b395-664dce5d049b" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 06 08:38:51 crc kubenswrapper[4763]: E1206 08:38:51.908629 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" containerName="registry-server" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.908636 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" containerName="registry-server" Dec 06 08:38:51 crc kubenswrapper[4763]: E1206 08:38:51.908659 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" containerName="extract-utilities" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.908665 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" containerName="extract-utilities" Dec 06 08:38:51 crc kubenswrapper[4763]: E1206 08:38:51.908692 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" containerName="extract-content" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.908698 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" containerName="extract-content" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.908931 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f9b9894-1fa4-493e-8e43-5ed50e8ae1d7" containerName="registry-server" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.908957 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="b775bb6f-096f-4232-b395-664dce5d049b" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.909668 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.912160 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.912417 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.912549 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.912661 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:38:51 crc kubenswrapper[4763]: I1206 08:38:51.921525 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487"] Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.007756 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-d4487\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.007796 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-d4487\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.008037 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb9xc\" (UniqueName: \"kubernetes.io/projected/8506a201-e6ba-4f5f-b637-e0dccab9caea-kube-api-access-fb9xc\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-d4487\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.109040 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-d4487\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.109097 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-d4487\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.109223 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb9xc\" (UniqueName: \"kubernetes.io/projected/8506a201-e6ba-4f5f-b637-e0dccab9caea-kube-api-access-fb9xc\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-d4487\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.113559 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-d4487\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.113848 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-d4487\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.125592 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb9xc\" (UniqueName: \"kubernetes.io/projected/8506a201-e6ba-4f5f-b637-e0dccab9caea-kube-api-access-fb9xc\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-d4487\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.228503 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.735917 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487"] Dec 06 08:38:52 crc kubenswrapper[4763]: I1206 08:38:52.824498 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" event={"ID":"8506a201-e6ba-4f5f-b637-e0dccab9caea","Type":"ContainerStarted","Data":"50dab2d71fbe10cdfcd3b7fd2603a7f4dce9444907c7b1ffa68586aa6ef5c0bd"} Dec 06 08:38:53 crc kubenswrapper[4763]: I1206 08:38:53.719373 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:38:53 crc kubenswrapper[4763]: E1206 08:38:53.720000 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:38:53 crc kubenswrapper[4763]: I1206 08:38:53.835206 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" event={"ID":"8506a201-e6ba-4f5f-b637-e0dccab9caea","Type":"ContainerStarted","Data":"6398c4fc8dd0beea01376e6a47d868b05d4777cf06e599a8cc72b1d0a85016b9"} Dec 06 08:38:53 crc kubenswrapper[4763]: I1206 08:38:53.859378 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" podStartSLOduration=2.391296797 podStartE2EDuration="2.859342893s" podCreationTimestamp="2025-12-06 
08:38:51 +0000 UTC" firstStartedPulling="2025-12-06 08:38:52.740766619 +0000 UTC m=+1615.316471657" lastFinishedPulling="2025-12-06 08:38:53.208812725 +0000 UTC m=+1615.784517753" observedRunningTime="2025-12-06 08:38:53.847803022 +0000 UTC m=+1616.423508070" watchObservedRunningTime="2025-12-06 08:38:53.859342893 +0000 UTC m=+1616.435047951" Dec 06 08:39:06 crc kubenswrapper[4763]: I1206 08:39:06.720171 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:39:06 crc kubenswrapper[4763]: E1206 08:39:06.721428 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:39:19 crc kubenswrapper[4763]: I1206 08:39:19.719570 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:39:19 crc kubenswrapper[4763]: E1206 08:39:19.720408 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:39:21 crc kubenswrapper[4763]: I1206 08:39:21.564408 4763 scope.go:117] "RemoveContainer" containerID="f63919fa96a37f34a635bc08d1a30da474a7ebf0eb7f03cca87ec7e9244be44f" Dec 06 08:39:21 crc kubenswrapper[4763]: I1206 08:39:21.587046 4763 scope.go:117] "RemoveContainer" containerID="3edfb35313457a44f4e0fe1f1a6a82372499f4f074dcf5cf167a3d4b3f2891fa" Dec 06 08:39:21 crc kubenswrapper[4763]: I1206 08:39:21.638135 4763 scope.go:117] "RemoveContainer" containerID="b3315cec3c6162ea65cd93ee402ec90d5bede19a1224465d073abfdafaafe894" Dec 06 08:39:21 crc kubenswrapper[4763]: I1206 08:39:21.686352 4763 scope.go:117] "RemoveContainer" containerID="a9f6e2f74a8eef61a2bece0e2c49b2a6dc4e71dc982e2e231ca1db088a9cd4a8" Dec 06 08:39:21 crc kubenswrapper[4763]: I1206 08:39:21.731856 4763 scope.go:117] "RemoveContainer" containerID="2d8662c742bc20d9ad64c1b9af85d0d5102acae432575aa41c3e5a00d287af62" Dec 06 08:39:21 crc kubenswrapper[4763]: I1206 08:39:21.756932 4763 scope.go:117] "RemoveContainer" containerID="806d2244100fa5497cbafd99d48d832c9cd284895e9dc7f0cefc187c47fc7f35" Dec 06 08:39:21 crc kubenswrapper[4763]: I1206 08:39:21.782859 4763 scope.go:117] "RemoveContainer" containerID="22bec48f316f01d16ca86d978b34ee86213c26e22f2d050665ecde25d93f82bf" Dec 06 08:39:21 crc kubenswrapper[4763]: I1206 08:39:21.835092 4763 scope.go:117] "RemoveContainer" containerID="3b15c58d58c885565088ed306767d38f556e333a2d8260d50a63aa1bc9fee519" Dec 06 08:39:21 crc kubenswrapper[4763]: I1206 08:39:21.855109 4763 scope.go:117] "RemoveContainer" containerID="820221b51ea1e461602886c5dd8f31a97627314c704eb323ff413fc48e7858ab" Dec 06 08:39:21 crc kubenswrapper[4763]: I1206 08:39:21.902489 4763 scope.go:117] "RemoveContainer" containerID="3d89cc63d31fb14cc2f1250eee02dfc4c5759af8f928fca3a1c284cb17637b0e" Dec 06 08:39:24 crc kubenswrapper[4763]: I1206 08:39:24.038867 4763 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-ebdf-account-create-update-ksnmx"] Dec 06 08:39:24 crc kubenswrapper[4763]: I1206 08:39:24.048296 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-gb2pf"] Dec 06 08:39:24 crc kubenswrapper[4763]: I1206 08:39:24.057862 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-ebdf-account-create-update-ksnmx"] Dec 06 08:39:24 crc kubenswrapper[4763]: I1206 08:39:24.067011 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-gb2pf"] Dec 06 08:39:25 crc kubenswrapper[4763]: I1206 08:39:25.730405 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05a15474-6053-4ab8-8c89-52d1289f9397" path="/var/lib/kubelet/pods/05a15474-6053-4ab8-8c89-52d1289f9397/volumes" Dec 06 08:39:25 crc kubenswrapper[4763]: I1206 08:39:25.731305 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9455395-06d9-4bc9-9ea6-183dd115655c" path="/var/lib/kubelet/pods/e9455395-06d9-4bc9-9ea6-183dd115655c/volumes" Dec 06 08:39:28 crc kubenswrapper[4763]: I1206 08:39:28.026597 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-b714-account-create-update-7m2lw"] Dec 06 08:39:28 crc kubenswrapper[4763]: I1206 08:39:28.038347 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-b714-account-create-update-7m2lw"] Dec 06 08:39:29 crc kubenswrapper[4763]: I1206 08:39:29.732537 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a23d4c9f-a447-4694-8b8f-40c6b7fba10c" path="/var/lib/kubelet/pods/a23d4c9f-a447-4694-8b8f-40c6b7fba10c/volumes" Dec 06 08:39:32 crc kubenswrapper[4763]: I1206 08:39:32.031476 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-lzmz8"] Dec 06 08:39:32 crc kubenswrapper[4763]: I1206 08:39:32.039816 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-3b7d-account-create-update-2ts6f"] Dec 06 08:39:32 crc kubenswrapper[4763]: I1206 08:39:32.050392 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-szpbq"] Dec 06 08:39:32 crc kubenswrapper[4763]: I1206 08:39:32.060625 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-f887-account-create-update-2tnbk"] Dec 06 08:39:32 crc kubenswrapper[4763]: I1206 08:39:32.067913 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-3b7d-account-create-update-2ts6f"] Dec 06 08:39:32 crc kubenswrapper[4763]: I1206 08:39:32.076419 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-j8569"] Dec 06 08:39:32 crc kubenswrapper[4763]: I1206 08:39:32.086064 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-szpbq"] Dec 06 08:39:32 crc kubenswrapper[4763]: I1206 08:39:32.094179 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-lzmz8"] Dec 06 08:39:32 crc kubenswrapper[4763]: I1206 08:39:32.101877 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-f887-account-create-update-2tnbk"] Dec 06 08:39:32 crc kubenswrapper[4763]: I1206 08:39:32.109123 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-j8569"] Dec 06 08:39:32 crc kubenswrapper[4763]: I1206 08:39:32.719500 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:39:32 
crc kubenswrapper[4763]: E1206 08:39:32.719863 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:39:33 crc kubenswrapper[4763]: I1206 08:39:33.732548 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49a57c04-2386-4579-996e-664b1b58349d" path="/var/lib/kubelet/pods/49a57c04-2386-4579-996e-664b1b58349d/volumes" Dec 06 08:39:33 crc kubenswrapper[4763]: I1206 08:39:33.734238 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80dd64d4-0026-4111-b2d3-7428956da9ab" path="/var/lib/kubelet/pods/80dd64d4-0026-4111-b2d3-7428956da9ab/volumes" Dec 06 08:39:33 crc kubenswrapper[4763]: I1206 08:39:33.734866 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b061ca68-5423-4891-a883-f5ed470789e8" path="/var/lib/kubelet/pods/b061ca68-5423-4891-a883-f5ed470789e8/volumes" Dec 06 08:39:33 crc kubenswrapper[4763]: I1206 08:39:33.735499 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d19920e9-3eff-4701-a81c-872cdf0f424c" path="/var/lib/kubelet/pods/d19920e9-3eff-4701-a81c-872cdf0f424c/volumes" Dec 06 08:39:33 crc kubenswrapper[4763]: I1206 08:39:33.736574 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8e347b3-3740-4fff-aca7-d6204175fda4" path="/var/lib/kubelet/pods/e8e347b3-3740-4fff-aca7-d6204175fda4/volumes" Dec 06 08:39:44 crc kubenswrapper[4763]: I1206 08:39:44.720283 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:39:44 crc kubenswrapper[4763]: E1206 08:39:44.721190 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:39:45 crc kubenswrapper[4763]: I1206 08:39:45.087980 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-97llg"] Dec 06 08:39:45 crc kubenswrapper[4763]: I1206 08:39:45.117233 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-sync-kr9kp"] Dec 06 08:39:45 crc kubenswrapper[4763]: I1206 08:39:45.129035 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-sync-kr9kp"] Dec 06 08:39:45 crc kubenswrapper[4763]: I1206 08:39:45.136660 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-97llg"] Dec 06 08:39:45 crc kubenswrapper[4763]: I1206 08:39:45.730746 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53409f27-f50b-4048-b355-8bc4b6956cf5" path="/var/lib/kubelet/pods/53409f27-f50b-4048-b355-8bc4b6956cf5/volumes" Dec 06 08:39:45 crc kubenswrapper[4763]: I1206 08:39:45.732424 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c133dec0-ffee-47f1-949f-72aeeed1163c" path="/var/lib/kubelet/pods/c133dec0-ffee-47f1-949f-72aeeed1163c/volumes" Dec 06 08:39:56 crc 
kubenswrapper[4763]: I1206 08:39:56.719633 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:39:56 crc kubenswrapper[4763]: E1206 08:39:56.720654 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:40:11 crc kubenswrapper[4763]: I1206 08:40:11.719217 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:40:11 crc kubenswrapper[4763]: E1206 08:40:11.720042 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:40:18 crc kubenswrapper[4763]: I1206 08:40:18.041584 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-vpjrt"] Dec 06 08:40:18 crc kubenswrapper[4763]: I1206 08:40:18.050364 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-vpjrt"] Dec 06 08:40:19 crc kubenswrapper[4763]: I1206 08:40:19.732170 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eeb85f24-e43f-4083-a8a2-1d0beebee795" path="/var/lib/kubelet/pods/eeb85f24-e43f-4083-a8a2-1d0beebee795/volumes" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.040694 4763 scope.go:117] "RemoveContainer" containerID="5329b5ad7d5b6d685691442e507b6e7b6ed1450233002bdfb04d8bac5d952d24" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.080621 4763 scope.go:117] "RemoveContainer" containerID="28df4f21ed388517c4df0ec401abe953379ef3fba72984b72f46444d1604e9df" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.109147 4763 scope.go:117] "RemoveContainer" containerID="875ca722d624a25b1b67554b38e6d2b0f71e4c7fe2c0af499b19dee802a2c068" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.151797 4763 scope.go:117] "RemoveContainer" containerID="26bedac173fd8f4092dde4f4ddb33446080392f51581663f24eb704a015743b6" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.210213 4763 scope.go:117] "RemoveContainer" containerID="3e182e52bd93fcf2a9ca40dd38bdfa54e673289b70a059f736f486687788006f" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.278780 4763 scope.go:117] "RemoveContainer" containerID="717ac97dc903c4c6956bfd29cc90915c7937bdeff1a5b2d435f9abc4f3d71af2" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.326489 4763 scope.go:117] "RemoveContainer" containerID="bcb0f98ba13b6825e0a12be2db7d686986f96caf4184b73b20ffcf7a5c8dd850" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.347711 4763 scope.go:117] "RemoveContainer" containerID="33e3fbf2c5e371b0320c3302d2acca85deae22d856cebd53650ad3bfe9f8606d" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.367679 4763 scope.go:117] "RemoveContainer" containerID="36bc5390de3a8fbf12d47ec663649b1af12cad35173b670c6fe6e586bc2c536e" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 
08:40:22.389870 4763 scope.go:117] "RemoveContainer" containerID="94ab73bf2720f4f7927f40391b6d98aea89d4f03fe83e1c60d1ab87047875f54" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.418004 4763 scope.go:117] "RemoveContainer" containerID="16ca94014b570767eec8f14789ce7cbb63a7d2e105767b9c7cce4b507446f71b" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.464384 4763 scope.go:117] "RemoveContainer" containerID="6114dfaf83a0d4cdbcc31eeab0b3e4db112aeef5a4206e798cc2639d73e94d57" Dec 06 08:40:22 crc kubenswrapper[4763]: I1206 08:40:22.489470 4763 scope.go:117] "RemoveContainer" containerID="ac9861d86e623d371b30a90c4d4bfa87b0d6ffffd0aff19f4c3288903c619f74" Dec 06 08:40:25 crc kubenswrapper[4763]: I1206 08:40:25.720948 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:40:25 crc kubenswrapper[4763]: E1206 08:40:25.721581 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:40:26 crc kubenswrapper[4763]: I1206 08:40:26.027017 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-2j26g"] Dec 06 08:40:26 crc kubenswrapper[4763]: I1206 08:40:26.035948 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-2j26g"] Dec 06 08:40:27 crc kubenswrapper[4763]: I1206 08:40:27.729947 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6214296-e09d-4c7a-a0ec-2d232793129f" path="/var/lib/kubelet/pods/e6214296-e09d-4c7a-a0ec-2d232793129f/volumes" Dec 06 08:40:32 crc kubenswrapper[4763]: I1206 08:40:32.030462 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-54khp"] Dec 06 08:40:32 crc kubenswrapper[4763]: I1206 08:40:32.038357 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-54khp"] Dec 06 08:40:33 crc kubenswrapper[4763]: I1206 08:40:33.732097 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03ab1923-fd90-45e6-9513-4ccc9b59667b" path="/var/lib/kubelet/pods/03ab1923-fd90-45e6-9513-4ccc9b59667b/volumes" Dec 06 08:40:34 crc kubenswrapper[4763]: I1206 08:40:34.032325 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-f8hjt"] Dec 06 08:40:34 crc kubenswrapper[4763]: I1206 08:40:34.044416 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-f8hjt"] Dec 06 08:40:35 crc kubenswrapper[4763]: I1206 08:40:35.731282 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27e8b45c-35a8-4407-849b-774bd681bf75" path="/var/lib/kubelet/pods/27e8b45c-35a8-4407-849b-774bd681bf75/volumes" Dec 06 08:40:38 crc kubenswrapper[4763]: I1206 08:40:38.719646 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:40:38 crc kubenswrapper[4763]: E1206 08:40:38.720383 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:40:44 crc kubenswrapper[4763]: I1206 08:40:44.941787 4763 generic.go:334] "Generic (PLEG): container finished" podID="8506a201-e6ba-4f5f-b637-e0dccab9caea" containerID="6398c4fc8dd0beea01376e6a47d868b05d4777cf06e599a8cc72b1d0a85016b9" exitCode=0 Dec 06 08:40:44 crc kubenswrapper[4763]: I1206 08:40:44.941847 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" event={"ID":"8506a201-e6ba-4f5f-b637-e0dccab9caea","Type":"ContainerDied","Data":"6398c4fc8dd0beea01376e6a47d868b05d4777cf06e599a8cc72b1d0a85016b9"} Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.350831 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.458797 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-inventory\") pod \"8506a201-e6ba-4f5f-b637-e0dccab9caea\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.458945 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-ssh-key\") pod \"8506a201-e6ba-4f5f-b637-e0dccab9caea\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.459043 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fb9xc\" (UniqueName: \"kubernetes.io/projected/8506a201-e6ba-4f5f-b637-e0dccab9caea-kube-api-access-fb9xc\") pod \"8506a201-e6ba-4f5f-b637-e0dccab9caea\" (UID: \"8506a201-e6ba-4f5f-b637-e0dccab9caea\") " Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.464381 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8506a201-e6ba-4f5f-b637-e0dccab9caea-kube-api-access-fb9xc" (OuterVolumeSpecName: "kube-api-access-fb9xc") pod "8506a201-e6ba-4f5f-b637-e0dccab9caea" (UID: "8506a201-e6ba-4f5f-b637-e0dccab9caea"). InnerVolumeSpecName "kube-api-access-fb9xc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.487349 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8506a201-e6ba-4f5f-b637-e0dccab9caea" (UID: "8506a201-e6ba-4f5f-b637-e0dccab9caea"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.488175 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-inventory" (OuterVolumeSpecName: "inventory") pod "8506a201-e6ba-4f5f-b637-e0dccab9caea" (UID: "8506a201-e6ba-4f5f-b637-e0dccab9caea"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.561052 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.561089 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8506a201-e6ba-4f5f-b637-e0dccab9caea-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.561098 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fb9xc\" (UniqueName: \"kubernetes.io/projected/8506a201-e6ba-4f5f-b637-e0dccab9caea-kube-api-access-fb9xc\") on node \"crc\" DevicePath \"\"" Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.961124 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" event={"ID":"8506a201-e6ba-4f5f-b637-e0dccab9caea","Type":"ContainerDied","Data":"50dab2d71fbe10cdfcd3b7fd2603a7f4dce9444907c7b1ffa68586aa6ef5c0bd"} Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.961169 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50dab2d71fbe10cdfcd3b7fd2603a7f4dce9444907c7b1ffa68586aa6ef5c0bd" Dec 06 08:40:46 crc kubenswrapper[4763]: I1206 08:40:46.961177 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-d4487" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.050819 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks"] Dec 06 08:40:47 crc kubenswrapper[4763]: E1206 08:40:47.051423 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8506a201-e6ba-4f5f-b637-e0dccab9caea" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.051451 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="8506a201-e6ba-4f5f-b637-e0dccab9caea" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.051718 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="8506a201-e6ba-4f5f-b637-e0dccab9caea" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.052919 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.056813 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.056813 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.056970 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.057609 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.070277 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-526ks\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.070384 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-526ks\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.070452 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw2tz\" (UniqueName: \"kubernetes.io/projected/6463dad3-6446-4186-9c4f-39264a7f8679-kube-api-access-gw2tz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-526ks\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.072523 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-jffbq"] Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.087545 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-888r5"] Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.096309 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-888r5"] Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.104691 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-jffbq"] Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.112202 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks"] Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.172844 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw2tz\" (UniqueName: \"kubernetes.io/projected/6463dad3-6446-4186-9c4f-39264a7f8679-kube-api-access-gw2tz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-526ks\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 
08:40:47.172990 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-526ks\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.173093 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-526ks\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.180459 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-526ks\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.180467 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-526ks\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.194880 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw2tz\" (UniqueName: \"kubernetes.io/projected/6463dad3-6446-4186-9c4f-39264a7f8679-kube-api-access-gw2tz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-526ks\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.388546 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.733402 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f62869c-d491-4a12-a88c-1a58ef5b1bea" path="/var/lib/kubelet/pods/4f62869c-d491-4a12-a88c-1a58ef5b1bea/volumes" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.735541 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5015508-305d-4f07-a137-85149d98f662" path="/var/lib/kubelet/pods/e5015508-305d-4f07-a137-85149d98f662/volumes" Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.930805 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks"] Dec 06 08:40:47 crc kubenswrapper[4763]: I1206 08:40:47.972954 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" event={"ID":"6463dad3-6446-4186-9c4f-39264a7f8679","Type":"ContainerStarted","Data":"e7c14ce41ddf3c2c666b77d4e7628bf861bf171f2b5f09314e98396a3d69d4c9"} Dec 06 08:40:48 crc kubenswrapper[4763]: I1206 08:40:48.985328 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" event={"ID":"6463dad3-6446-4186-9c4f-39264a7f8679","Type":"ContainerStarted","Data":"6c01eab12f72cf8818f44efc27456c225e7c69929cae3358cddd140ac75f1382"} Dec 06 08:40:49 crc kubenswrapper[4763]: I1206 08:40:49.012527 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" podStartSLOduration=1.485330852 podStartE2EDuration="2.012499835s" podCreationTimestamp="2025-12-06 08:40:47 +0000 UTC" firstStartedPulling="2025-12-06 08:40:47.938416697 +0000 UTC m=+1730.514121735" lastFinishedPulling="2025-12-06 08:40:48.46558568 +0000 UTC m=+1731.041290718" observedRunningTime="2025-12-06 08:40:49.002174856 +0000 UTC m=+1731.577879894" watchObservedRunningTime="2025-12-06 08:40:49.012499835 +0000 UTC m=+1731.588204883" Dec 06 08:40:50 crc kubenswrapper[4763]: I1206 08:40:50.720923 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:40:50 crc kubenswrapper[4763]: E1206 08:40:50.722040 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:41:05 crc kubenswrapper[4763]: I1206 08:41:05.720319 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:41:05 crc kubenswrapper[4763]: E1206 08:41:05.721132 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:41:19 crc kubenswrapper[4763]: I1206 08:41:19.720019 
4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:41:19 crc kubenswrapper[4763]: E1206 08:41:19.721016 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:41:22 crc kubenswrapper[4763]: I1206 08:41:22.747669 4763 scope.go:117] "RemoveContainer" containerID="543c64ae689480077e756c5921088c9cdf08b4b3459cab34200bc4c7d71087a2" Dec 06 08:41:22 crc kubenswrapper[4763]: I1206 08:41:22.780522 4763 scope.go:117] "RemoveContainer" containerID="d5a713202af766136564c2d9068a0b2056e360a876a235daf463af5a38943249" Dec 06 08:41:22 crc kubenswrapper[4763]: I1206 08:41:22.820601 4763 scope.go:117] "RemoveContainer" containerID="50ed0054acca71984db7fe90fdcc282ca7d62503ab260ed012e283197f39540c" Dec 06 08:41:22 crc kubenswrapper[4763]: I1206 08:41:22.873333 4763 scope.go:117] "RemoveContainer" containerID="b281a7e9f56f3253c2eaf1973115c28ec9a09af7202a550620dbe038d02bb95a" Dec 06 08:41:22 crc kubenswrapper[4763]: I1206 08:41:22.928969 4763 scope.go:117] "RemoveContainer" containerID="d0221a9f60fe666a65ee57a39ae231be35ff7085b392d1f5d3f775778ad2b463" Dec 06 08:41:27 crc kubenswrapper[4763]: I1206 08:41:27.039739 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-6f65-account-create-update-h52kq"] Dec 06 08:41:27 crc kubenswrapper[4763]: I1206 08:41:27.054915 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-e43f-account-create-update-jj5wk"] Dec 06 08:41:27 crc kubenswrapper[4763]: I1206 08:41:27.063925 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-e43f-account-create-update-jj5wk"] Dec 06 08:41:27 crc kubenswrapper[4763]: I1206 08:41:27.073883 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-6f65-account-create-update-h52kq"] Dec 06 08:41:27 crc kubenswrapper[4763]: I1206 08:41:27.729793 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41c2ea22-f27e-4a11-bda3-17b509191246" path="/var/lib/kubelet/pods/41c2ea22-f27e-4a11-bda3-17b509191246/volumes" Dec 06 08:41:27 crc kubenswrapper[4763]: I1206 08:41:27.730497 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cee19965-9240-4933-8864-fd187283c3ba" path="/var/lib/kubelet/pods/cee19965-9240-4933-8864-fd187283c3ba/volumes" Dec 06 08:41:28 crc kubenswrapper[4763]: I1206 08:41:28.029633 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-rlbsb"] Dec 06 08:41:28 crc kubenswrapper[4763]: I1206 08:41:28.037816 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0494-account-create-update-bnvcj"] Dec 06 08:41:28 crc kubenswrapper[4763]: I1206 08:41:28.046114 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-ksdhn"] Dec 06 08:41:28 crc kubenswrapper[4763]: I1206 08:41:28.053393 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-w27ss"] Dec 06 08:41:28 crc kubenswrapper[4763]: I1206 08:41:28.061100 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-rlbsb"] Dec 06 
08:41:28 crc kubenswrapper[4763]: I1206 08:41:28.069274 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0494-account-create-update-bnvcj"] Dec 06 08:41:28 crc kubenswrapper[4763]: I1206 08:41:28.077173 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-w27ss"] Dec 06 08:41:28 crc kubenswrapper[4763]: I1206 08:41:28.085793 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-ksdhn"] Dec 06 08:41:29 crc kubenswrapper[4763]: I1206 08:41:29.729732 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69dcf3fd-83ee-487e-9664-bf72b745d236" path="/var/lib/kubelet/pods/69dcf3fd-83ee-487e-9664-bf72b745d236/volumes" Dec 06 08:41:29 crc kubenswrapper[4763]: I1206 08:41:29.730326 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ade65003-bc0b-43b4-ba9d-76cd8729deb1" path="/var/lib/kubelet/pods/ade65003-bc0b-43b4-ba9d-76cd8729deb1/volumes" Dec 06 08:41:29 crc kubenswrapper[4763]: I1206 08:41:29.730829 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a" path="/var/lib/kubelet/pods/c95d3b64-a7bd-4a3e-8e2b-cfdd1be69e7a/volumes" Dec 06 08:41:29 crc kubenswrapper[4763]: I1206 08:41:29.731443 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3a4e496-4b51-4e05-8b48-7edf7846d70c" path="/var/lib/kubelet/pods/e3a4e496-4b51-4e05-8b48-7edf7846d70c/volumes" Dec 06 08:41:30 crc kubenswrapper[4763]: I1206 08:41:30.720556 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:41:30 crc kubenswrapper[4763]: E1206 08:41:30.720806 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:41:44 crc kubenswrapper[4763]: I1206 08:41:44.719874 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:41:44 crc kubenswrapper[4763]: E1206 08:41:44.720759 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:41:57 crc kubenswrapper[4763]: I1206 08:41:57.726766 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:41:57 crc kubenswrapper[4763]: E1206 08:41:57.728155 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:41:59 crc kubenswrapper[4763]: 
I1206 08:41:59.041328 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-7nmxm"] Dec 06 08:41:59 crc kubenswrapper[4763]: I1206 08:41:59.051111 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-7nmxm"] Dec 06 08:41:59 crc kubenswrapper[4763]: I1206 08:41:59.731171 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1d63988-0eda-49ae-b4ec-0cf81b1f9784" path="/var/lib/kubelet/pods/a1d63988-0eda-49ae-b4ec-0cf81b1f9784/volumes" Dec 06 08:42:08 crc kubenswrapper[4763]: I1206 08:42:08.720252 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:42:08 crc kubenswrapper[4763]: E1206 08:42:08.721224 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:42:18 crc kubenswrapper[4763]: I1206 08:42:18.877069 4763 generic.go:334] "Generic (PLEG): container finished" podID="6463dad3-6446-4186-9c4f-39264a7f8679" containerID="6c01eab12f72cf8818f44efc27456c225e7c69929cae3358cddd140ac75f1382" exitCode=0 Dec 06 08:42:18 crc kubenswrapper[4763]: I1206 08:42:18.877138 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" event={"ID":"6463dad3-6446-4186-9c4f-39264a7f8679","Type":"ContainerDied","Data":"6c01eab12f72cf8818f44efc27456c225e7c69929cae3358cddd140ac75f1382"} Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.311191 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.495076 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw2tz\" (UniqueName: \"kubernetes.io/projected/6463dad3-6446-4186-9c4f-39264a7f8679-kube-api-access-gw2tz\") pod \"6463dad3-6446-4186-9c4f-39264a7f8679\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.495223 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-ssh-key\") pod \"6463dad3-6446-4186-9c4f-39264a7f8679\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.495323 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-inventory\") pod \"6463dad3-6446-4186-9c4f-39264a7f8679\" (UID: \"6463dad3-6446-4186-9c4f-39264a7f8679\") " Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.500733 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6463dad3-6446-4186-9c4f-39264a7f8679-kube-api-access-gw2tz" (OuterVolumeSpecName: "kube-api-access-gw2tz") pod "6463dad3-6446-4186-9c4f-39264a7f8679" (UID: "6463dad3-6446-4186-9c4f-39264a7f8679"). InnerVolumeSpecName "kube-api-access-gw2tz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.524763 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-inventory" (OuterVolumeSpecName: "inventory") pod "6463dad3-6446-4186-9c4f-39264a7f8679" (UID: "6463dad3-6446-4186-9c4f-39264a7f8679"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.525127 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6463dad3-6446-4186-9c4f-39264a7f8679" (UID: "6463dad3-6446-4186-9c4f-39264a7f8679"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.598067 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.598108 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6463dad3-6446-4186-9c4f-39264a7f8679-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.598119 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw2tz\" (UniqueName: \"kubernetes.io/projected/6463dad3-6446-4186-9c4f-39264a7f8679-kube-api-access-gw2tz\") on node \"crc\" DevicePath \"\"" Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.898378 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" event={"ID":"6463dad3-6446-4186-9c4f-39264a7f8679","Type":"ContainerDied","Data":"e7c14ce41ddf3c2c666b77d4e7628bf861bf171f2b5f09314e98396a3d69d4c9"} Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.899155 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7c14ce41ddf3c2c666b77d4e7628bf861bf171f2b5f09314e98396a3d69d4c9" Dec 06 08:42:20 crc kubenswrapper[4763]: I1206 08:42:20.898425 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-526ks" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.009405 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76"] Dec 06 08:42:21 crc kubenswrapper[4763]: E1206 08:42:21.009776 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6463dad3-6446-4186-9c4f-39264a7f8679" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.009794 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="6463dad3-6446-4186-9c4f-39264a7f8679" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.010029 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="6463dad3-6446-4186-9c4f-39264a7f8679" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.010689 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.014502 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.014502 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.014632 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.015428 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.024709 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76"] Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.108368 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z2j76\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.108520 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z2j76\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.108697 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kfkd\" (UniqueName: \"kubernetes.io/projected/fecaf57c-7b83-41f2-a99c-001dd99c72d6-kube-api-access-5kfkd\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z2j76\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.209646 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kfkd\" (UniqueName: \"kubernetes.io/projected/fecaf57c-7b83-41f2-a99c-001dd99c72d6-kube-api-access-5kfkd\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z2j76\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.209770 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z2j76\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.209918 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-ssh-key\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-z2j76\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.214236 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z2j76\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.222641 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z2j76\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.229167 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kfkd\" (UniqueName: \"kubernetes.io/projected/fecaf57c-7b83-41f2-a99c-001dd99c72d6-kube-api-access-5kfkd\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z2j76\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.336246 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.859166 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76"] Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.871088 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 08:42:21 crc kubenswrapper[4763]: I1206 08:42:21.909128 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" event={"ID":"fecaf57c-7b83-41f2-a99c-001dd99c72d6","Type":"ContainerStarted","Data":"85b1465374d03ba294d3a8618d793388758e8980d5c8863b846b47d68623aa20"} Dec 06 08:42:22 crc kubenswrapper[4763]: I1206 08:42:22.720325 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:42:22 crc kubenswrapper[4763]: E1206 08:42:22.721787 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:42:22 crc kubenswrapper[4763]: I1206 08:42:22.921040 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" event={"ID":"fecaf57c-7b83-41f2-a99c-001dd99c72d6","Type":"ContainerStarted","Data":"8ed4c8818be81853c84a4631e01938ec04bee9b1e6567728f0e0178a78e5176d"} Dec 06 08:42:22 crc kubenswrapper[4763]: I1206 08:42:22.952026 4763 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" podStartSLOduration=2.536461559 podStartE2EDuration="2.951999633s" podCreationTimestamp="2025-12-06 08:42:20 +0000 UTC" firstStartedPulling="2025-12-06 08:42:21.870766692 +0000 UTC m=+1824.446471740" lastFinishedPulling="2025-12-06 08:42:22.286304776 +0000 UTC m=+1824.862009814" observedRunningTime="2025-12-06 08:42:22.936536355 +0000 UTC m=+1825.512241433" watchObservedRunningTime="2025-12-06 08:42:22.951999633 +0000 UTC m=+1825.527704711" Dec 06 08:42:23 crc kubenswrapper[4763]: I1206 08:42:23.055103 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-b47q2"] Dec 06 08:42:23 crc kubenswrapper[4763]: I1206 08:42:23.063242 4763 scope.go:117] "RemoveContainer" containerID="0cded627a2cadd4726b8895afa9ccecabb715e3182001096b4bbc2ec60ff1395" Dec 06 08:42:23 crc kubenswrapper[4763]: I1206 08:42:23.067948 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-b47q2"] Dec 06 08:42:23 crc kubenswrapper[4763]: I1206 08:42:23.092671 4763 scope.go:117] "RemoveContainer" containerID="29ffe96153cb0d73b77807cd6b82344fc55acf089dc138c21c0ec7c68c43178f" Dec 06 08:42:23 crc kubenswrapper[4763]: I1206 08:42:23.186469 4763 scope.go:117] "RemoveContainer" containerID="47ce7ecd429f3aeeff3f204650f5ab79ed14aceaae2fcff4146042d595715556" Dec 06 08:42:23 crc kubenswrapper[4763]: I1206 08:42:23.206106 4763 scope.go:117] "RemoveContainer" containerID="7093f4301b57be16bbd48c53cbc462645f2b401817814f3461964e8700d78312" Dec 06 08:42:23 crc kubenswrapper[4763]: I1206 08:42:23.227878 4763 scope.go:117] "RemoveContainer" containerID="f6c57d6cd958a1fb65b31e203be2c6dcf02f8bf778b3af47df43a1720837dab4" Dec 06 08:42:23 crc kubenswrapper[4763]: I1206 08:42:23.248918 4763 scope.go:117] "RemoveContainer" containerID="0c090efeca0e20d3d72c8e409b6b2151d7b912f8f620783f2a5c21f5943147b5" Dec 06 08:42:23 crc kubenswrapper[4763]: I1206 08:42:23.273014 4763 scope.go:117] "RemoveContainer" containerID="76699d38119a38d502310a830558b278889dd2c7a0842979ab647d696b4a5df0" Dec 06 08:42:23 crc kubenswrapper[4763]: I1206 08:42:23.733792 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dace35f-53fc-43b5-a8c7-d58ba87f496b" path="/var/lib/kubelet/pods/9dace35f-53fc-43b5-a8c7-d58ba87f496b/volumes" Dec 06 08:42:27 crc kubenswrapper[4763]: I1206 08:42:27.062177 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-j9gvv"] Dec 06 08:42:27 crc kubenswrapper[4763]: I1206 08:42:27.077279 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-j9gvv"] Dec 06 08:42:27 crc kubenswrapper[4763]: I1206 08:42:27.734980 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40742493-d161-4853-89e0-f2841a3ea6d7" path="/var/lib/kubelet/pods/40742493-d161-4853-89e0-f2841a3ea6d7/volumes" Dec 06 08:42:27 crc kubenswrapper[4763]: I1206 08:42:27.964882 4763 generic.go:334] "Generic (PLEG): container finished" podID="fecaf57c-7b83-41f2-a99c-001dd99c72d6" containerID="8ed4c8818be81853c84a4631e01938ec04bee9b1e6567728f0e0178a78e5176d" exitCode=0 Dec 06 08:42:27 crc kubenswrapper[4763]: I1206 08:42:27.964943 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" event={"ID":"fecaf57c-7b83-41f2-a99c-001dd99c72d6","Type":"ContainerDied","Data":"8ed4c8818be81853c84a4631e01938ec04bee9b1e6567728f0e0178a78e5176d"} Dec 
06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.390413 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.514888 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kfkd\" (UniqueName: \"kubernetes.io/projected/fecaf57c-7b83-41f2-a99c-001dd99c72d6-kube-api-access-5kfkd\") pod \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.515099 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-ssh-key\") pod \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.515181 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-inventory\") pod \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\" (UID: \"fecaf57c-7b83-41f2-a99c-001dd99c72d6\") " Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.521474 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fecaf57c-7b83-41f2-a99c-001dd99c72d6-kube-api-access-5kfkd" (OuterVolumeSpecName: "kube-api-access-5kfkd") pod "fecaf57c-7b83-41f2-a99c-001dd99c72d6" (UID: "fecaf57c-7b83-41f2-a99c-001dd99c72d6"). InnerVolumeSpecName "kube-api-access-5kfkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.545108 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-inventory" (OuterVolumeSpecName: "inventory") pod "fecaf57c-7b83-41f2-a99c-001dd99c72d6" (UID: "fecaf57c-7b83-41f2-a99c-001dd99c72d6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.547590 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fecaf57c-7b83-41f2-a99c-001dd99c72d6" (UID: "fecaf57c-7b83-41f2-a99c-001dd99c72d6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.616990 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.617024 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fecaf57c-7b83-41f2-a99c-001dd99c72d6-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.617034 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kfkd\" (UniqueName: \"kubernetes.io/projected/fecaf57c-7b83-41f2-a99c-001dd99c72d6-kube-api-access-5kfkd\") on node \"crc\" DevicePath \"\"" Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.985178 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" event={"ID":"fecaf57c-7b83-41f2-a99c-001dd99c72d6","Type":"ContainerDied","Data":"85b1465374d03ba294d3a8618d793388758e8980d5c8863b846b47d68623aa20"} Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.985266 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85b1465374d03ba294d3a8618d793388758e8980d5c8863b846b47d68623aa20" Dec 06 08:42:29 crc kubenswrapper[4763]: I1206 08:42:29.985281 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z2j76" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.071706 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h"] Dec 06 08:42:30 crc kubenswrapper[4763]: E1206 08:42:30.072355 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fecaf57c-7b83-41f2-a99c-001dd99c72d6" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.072384 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="fecaf57c-7b83-41f2-a99c-001dd99c72d6" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.072724 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="fecaf57c-7b83-41f2-a99c-001dd99c72d6" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.073768 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.085809 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h"] Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.109864 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.110185 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.110360 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.111215 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.126918 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v6h6h\" (UID: \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.127051 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v6h6h\" (UID: \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.127229 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km86g\" (UniqueName: \"kubernetes.io/projected/96d557e1-eb4f-4e15-b77c-2d308ddadb17-kube-api-access-km86g\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v6h6h\" (UID: \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.229201 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km86g\" (UniqueName: \"kubernetes.io/projected/96d557e1-eb4f-4e15-b77c-2d308ddadb17-kube-api-access-km86g\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v6h6h\" (UID: \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.229283 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v6h6h\" (UID: \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.229322 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v6h6h\" (UID: 
\"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.234650 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v6h6h\" (UID: \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.234988 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v6h6h\" (UID: \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.247237 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km86g\" (UniqueName: \"kubernetes.io/projected/96d557e1-eb4f-4e15-b77c-2d308ddadb17-kube-api-access-km86g\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v6h6h\" (UID: \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.428610 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:42:30 crc kubenswrapper[4763]: I1206 08:42:30.977365 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h"] Dec 06 08:42:31 crc kubenswrapper[4763]: I1206 08:42:30.999705 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" event={"ID":"96d557e1-eb4f-4e15-b77c-2d308ddadb17","Type":"ContainerStarted","Data":"f0cfcc5e2ee6729efaa444652a98474aaad2884a6b3dd207077749011ae30769"} Dec 06 08:42:32 crc kubenswrapper[4763]: I1206 08:42:32.015220 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" event={"ID":"96d557e1-eb4f-4e15-b77c-2d308ddadb17","Type":"ContainerStarted","Data":"71deeaefb8080d33dfcdee927ec302394a9e68bfc88c478085cc74bc275a98fc"} Dec 06 08:42:34 crc kubenswrapper[4763]: I1206 08:42:34.719498 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:42:34 crc kubenswrapper[4763]: E1206 08:42:34.720067 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:42:47 crc kubenswrapper[4763]: I1206 08:42:47.728597 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:42:47 crc kubenswrapper[4763]: E1206 08:42:47.729396 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:42:58 crc kubenswrapper[4763]: I1206 08:42:58.720159 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:42:58 crc kubenswrapper[4763]: E1206 08:42:58.721106 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:43:06 crc kubenswrapper[4763]: I1206 08:43:06.080264 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" podStartSLOduration=35.686912599 podStartE2EDuration="36.080245823s" podCreationTimestamp="2025-12-06 08:42:30 +0000 UTC" firstStartedPulling="2025-12-06 08:42:30.981292874 +0000 UTC m=+1833.556997902" lastFinishedPulling="2025-12-06 08:42:31.374626088 +0000 UTC m=+1833.950331126" observedRunningTime="2025-12-06 08:42:32.03682656 +0000 UTC m=+1834.612531598" watchObservedRunningTime="2025-12-06 08:43:06.080245823 +0000 UTC m=+1868.655950861" Dec 06 08:43:06 crc kubenswrapper[4763]: I1206 08:43:06.091847 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-2drns"] Dec 06 08:43:06 crc kubenswrapper[4763]: I1206 08:43:06.116496 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-2drns"] Dec 06 08:43:07 crc kubenswrapper[4763]: I1206 08:43:07.731089 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d95b1c06-d160-4443-82ab-80bc512a4fba" path="/var/lib/kubelet/pods/d95b1c06-d160-4443-82ab-80bc512a4fba/volumes" Dec 06 08:43:13 crc kubenswrapper[4763]: I1206 08:43:13.719924 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:43:14 crc kubenswrapper[4763]: I1206 08:43:14.395461 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"ae1be8b2d0ea850b8e3985a086ff6ca03663fa450062cd2440c8c4dd4d72b10e"} Dec 06 08:43:15 crc kubenswrapper[4763]: I1206 08:43:15.404632 4763 generic.go:334] "Generic (PLEG): container finished" podID="96d557e1-eb4f-4e15-b77c-2d308ddadb17" containerID="71deeaefb8080d33dfcdee927ec302394a9e68bfc88c478085cc74bc275a98fc" exitCode=0 Dec 06 08:43:15 crc kubenswrapper[4763]: I1206 08:43:15.404699 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" event={"ID":"96d557e1-eb4f-4e15-b77c-2d308ddadb17","Type":"ContainerDied","Data":"71deeaefb8080d33dfcdee927ec302394a9e68bfc88c478085cc74bc275a98fc"} Dec 06 08:43:16 crc kubenswrapper[4763]: I1206 08:43:16.879049 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.059065 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-ssh-key\") pod \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\" (UID: \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.059164 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-inventory\") pod \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\" (UID: \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.059267 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km86g\" (UniqueName: \"kubernetes.io/projected/96d557e1-eb4f-4e15-b77c-2d308ddadb17-kube-api-access-km86g\") pod \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\" (UID: \"96d557e1-eb4f-4e15-b77c-2d308ddadb17\") " Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.075955 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96d557e1-eb4f-4e15-b77c-2d308ddadb17-kube-api-access-km86g" (OuterVolumeSpecName: "kube-api-access-km86g") pod "96d557e1-eb4f-4e15-b77c-2d308ddadb17" (UID: "96d557e1-eb4f-4e15-b77c-2d308ddadb17"). InnerVolumeSpecName "kube-api-access-km86g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.094028 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "96d557e1-eb4f-4e15-b77c-2d308ddadb17" (UID: "96d557e1-eb4f-4e15-b77c-2d308ddadb17"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.103845 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-inventory" (OuterVolumeSpecName: "inventory") pod "96d557e1-eb4f-4e15-b77c-2d308ddadb17" (UID: "96d557e1-eb4f-4e15-b77c-2d308ddadb17"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.161121 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.161153 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96d557e1-eb4f-4e15-b77c-2d308ddadb17-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.161165 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km86g\" (UniqueName: \"kubernetes.io/projected/96d557e1-eb4f-4e15-b77c-2d308ddadb17-kube-api-access-km86g\") on node \"crc\" DevicePath \"\"" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.422605 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" event={"ID":"96d557e1-eb4f-4e15-b77c-2d308ddadb17","Type":"ContainerDied","Data":"f0cfcc5e2ee6729efaa444652a98474aaad2884a6b3dd207077749011ae30769"} Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.422978 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0cfcc5e2ee6729efaa444652a98474aaad2884a6b3dd207077749011ae30769" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.422726 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v6h6h" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.506346 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7"] Dec 06 08:43:17 crc kubenswrapper[4763]: E1206 08:43:17.506735 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96d557e1-eb4f-4e15-b77c-2d308ddadb17" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.506752 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="96d557e1-eb4f-4e15-b77c-2d308ddadb17" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.507436 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="96d557e1-eb4f-4e15-b77c-2d308ddadb17" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.508308 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.514627 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.514890 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.514941 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.520474 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7"] Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.524545 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.671328 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlf5t\" (UniqueName: \"kubernetes.io/projected/aed517d7-adb8-4335-8184-6c55f27dd3b8-kube-api-access-xlf5t\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7\" (UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.671498 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7\" (UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.671707 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7\" (UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.773666 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7\" (UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.773759 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7\" (UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.773865 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlf5t\" (UniqueName: \"kubernetes.io/projected/aed517d7-adb8-4335-8184-6c55f27dd3b8-kube-api-access-xlf5t\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7\" 
(UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.778381 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7\" (UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.779718 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7\" (UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.791973 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlf5t\" (UniqueName: \"kubernetes.io/projected/aed517d7-adb8-4335-8184-6c55f27dd3b8-kube-api-access-xlf5t\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7\" (UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:43:17 crc kubenswrapper[4763]: I1206 08:43:17.829810 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:43:18 crc kubenswrapper[4763]: I1206 08:43:18.396643 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7"] Dec 06 08:43:18 crc kubenswrapper[4763]: I1206 08:43:18.435423 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" event={"ID":"aed517d7-adb8-4335-8184-6c55f27dd3b8","Type":"ContainerStarted","Data":"1d45acb2879287f3b85aceb23377995e5025d7588ca489b500aa4a20df1fcb46"} Dec 06 08:43:19 crc kubenswrapper[4763]: I1206 08:43:19.447623 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" event={"ID":"aed517d7-adb8-4335-8184-6c55f27dd3b8","Type":"ContainerStarted","Data":"78d14d771d32073afd67b371893fd2cd8eb0f83d2f98c583ec40917f7739b87b"} Dec 06 08:43:23 crc kubenswrapper[4763]: I1206 08:43:23.430262 4763 scope.go:117] "RemoveContainer" containerID="55b99a70902a11c8fbd76781ad65c120fadbffaff96c1b0b96932d1e5c3d2d43" Dec 06 08:43:23 crc kubenswrapper[4763]: I1206 08:43:23.482368 4763 scope.go:117] "RemoveContainer" containerID="e4b6c360a2a90838f5017bd14dbbbb0bbdb8fd3b2e7926caae7bafc0d6523d57" Dec 06 08:43:23 crc kubenswrapper[4763]: I1206 08:43:23.519543 4763 scope.go:117] "RemoveContainer" containerID="a511531cbcddccb6b2e61346bfd21c4943905b42915613641facfe34c4bb6ae1" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.531404 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" podStartSLOduration=23.791437494 podStartE2EDuration="24.531377016s" podCreationTimestamp="2025-12-06 08:43:17 +0000 UTC" firstStartedPulling="2025-12-06 08:43:18.409074372 +0000 UTC m=+1880.984779410" lastFinishedPulling="2025-12-06 08:43:19.149013894 +0000 UTC m=+1881.724718932" observedRunningTime="2025-12-06 
08:43:19.466616698 +0000 UTC m=+1882.042321806" watchObservedRunningTime="2025-12-06 08:43:41.531377016 +0000 UTC m=+1904.107082054" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.535698 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fvprt"] Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.540407 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.552303 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fvprt"] Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.645124 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-utilities\") pod \"redhat-operators-fvprt\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.645235 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbv4s\" (UniqueName: \"kubernetes.io/projected/0b710eec-9340-4cac-a773-0bb812c4733d-kube-api-access-dbv4s\") pod \"redhat-operators-fvprt\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.645407 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-catalog-content\") pod \"redhat-operators-fvprt\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.747621 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbv4s\" (UniqueName: \"kubernetes.io/projected/0b710eec-9340-4cac-a773-0bb812c4733d-kube-api-access-dbv4s\") pod \"redhat-operators-fvprt\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.747683 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-catalog-content\") pod \"redhat-operators-fvprt\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.747834 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-utilities\") pod \"redhat-operators-fvprt\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.748422 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-utilities\") pod \"redhat-operators-fvprt\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.748549 4763 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-catalog-content\") pod \"redhat-operators-fvprt\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.775782 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbv4s\" (UniqueName: \"kubernetes.io/projected/0b710eec-9340-4cac-a773-0bb812c4733d-kube-api-access-dbv4s\") pod \"redhat-operators-fvprt\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:41 crc kubenswrapper[4763]: I1206 08:43:41.873916 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:42 crc kubenswrapper[4763]: I1206 08:43:42.366322 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fvprt"] Dec 06 08:43:42 crc kubenswrapper[4763]: I1206 08:43:42.674001 4763 generic.go:334] "Generic (PLEG): container finished" podID="0b710eec-9340-4cac-a773-0bb812c4733d" containerID="94c8aa104947827de4b3bd157ceb8e010420269f6fb6ead4d4145a534e442187" exitCode=0 Dec 06 08:43:42 crc kubenswrapper[4763]: I1206 08:43:42.674228 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvprt" event={"ID":"0b710eec-9340-4cac-a773-0bb812c4733d","Type":"ContainerDied","Data":"94c8aa104947827de4b3bd157ceb8e010420269f6fb6ead4d4145a534e442187"} Dec 06 08:43:42 crc kubenswrapper[4763]: I1206 08:43:42.674344 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvprt" event={"ID":"0b710eec-9340-4cac-a773-0bb812c4733d","Type":"ContainerStarted","Data":"9b0953b27a4aa27ccd3aaec02ff37d75379cf1d489b3693e31bf19f7f364ea68"} Dec 06 08:43:43 crc kubenswrapper[4763]: I1206 08:43:43.686777 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvprt" event={"ID":"0b710eec-9340-4cac-a773-0bb812c4733d","Type":"ContainerStarted","Data":"e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2"} Dec 06 08:43:46 crc kubenswrapper[4763]: I1206 08:43:46.721312 4763 generic.go:334] "Generic (PLEG): container finished" podID="0b710eec-9340-4cac-a773-0bb812c4733d" containerID="e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2" exitCode=0 Dec 06 08:43:46 crc kubenswrapper[4763]: I1206 08:43:46.721404 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvprt" event={"ID":"0b710eec-9340-4cac-a773-0bb812c4733d","Type":"ContainerDied","Data":"e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2"} Dec 06 08:43:47 crc kubenswrapper[4763]: I1206 08:43:47.732722 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvprt" event={"ID":"0b710eec-9340-4cac-a773-0bb812c4733d","Type":"ContainerStarted","Data":"285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61"} Dec 06 08:43:47 crc kubenswrapper[4763]: I1206 08:43:47.759074 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fvprt" podStartSLOduration=2.26555402 podStartE2EDuration="6.759041135s" podCreationTimestamp="2025-12-06 08:43:41 +0000 UTC" firstStartedPulling="2025-12-06 08:43:42.677647452 +0000 UTC m=+1905.253352490" lastFinishedPulling="2025-12-06 
08:43:47.171134567 +0000 UTC m=+1909.746839605" observedRunningTime="2025-12-06 08:43:47.749024104 +0000 UTC m=+1910.324729152" watchObservedRunningTime="2025-12-06 08:43:47.759041135 +0000 UTC m=+1910.334746173" Dec 06 08:43:51 crc kubenswrapper[4763]: I1206 08:43:51.874136 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:51 crc kubenswrapper[4763]: I1206 08:43:51.875588 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:43:52 crc kubenswrapper[4763]: I1206 08:43:52.915222 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fvprt" podUID="0b710eec-9340-4cac-a773-0bb812c4733d" containerName="registry-server" probeResult="failure" output=< Dec 06 08:43:52 crc kubenswrapper[4763]: timeout: failed to connect service ":50051" within 1s Dec 06 08:43:52 crc kubenswrapper[4763]: > Dec 06 08:44:01 crc kubenswrapper[4763]: I1206 08:44:01.916686 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:44:01 crc kubenswrapper[4763]: I1206 08:44:01.968703 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:44:02 crc kubenswrapper[4763]: I1206 08:44:02.146676 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fvprt"] Dec 06 08:44:03 crc kubenswrapper[4763]: I1206 08:44:03.893421 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fvprt" podUID="0b710eec-9340-4cac-a773-0bb812c4733d" containerName="registry-server" containerID="cri-o://285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61" gracePeriod=2 Dec 06 08:44:04 crc kubenswrapper[4763]: I1206 08:44:04.872586 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:44:04 crc kubenswrapper[4763]: I1206 08:44:04.913714 4763 generic.go:334] "Generic (PLEG): container finished" podID="0b710eec-9340-4cac-a773-0bb812c4733d" containerID="285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61" exitCode=0 Dec 06 08:44:04 crc kubenswrapper[4763]: I1206 08:44:04.913755 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvprt" event={"ID":"0b710eec-9340-4cac-a773-0bb812c4733d","Type":"ContainerDied","Data":"285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61"} Dec 06 08:44:04 crc kubenswrapper[4763]: I1206 08:44:04.913782 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fvprt" event={"ID":"0b710eec-9340-4cac-a773-0bb812c4733d","Type":"ContainerDied","Data":"9b0953b27a4aa27ccd3aaec02ff37d75379cf1d489b3693e31bf19f7f364ea68"} Dec 06 08:44:04 crc kubenswrapper[4763]: I1206 08:44:04.913801 4763 scope.go:117] "RemoveContainer" containerID="285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61" Dec 06 08:44:04 crc kubenswrapper[4763]: I1206 08:44:04.913960 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fvprt" Dec 06 08:44:04 crc kubenswrapper[4763]: I1206 08:44:04.943413 4763 scope.go:117] "RemoveContainer" containerID="e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2" Dec 06 08:44:04 crc kubenswrapper[4763]: I1206 08:44:04.970126 4763 scope.go:117] "RemoveContainer" containerID="94c8aa104947827de4b3bd157ceb8e010420269f6fb6ead4d4145a534e442187" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.015688 4763 scope.go:117] "RemoveContainer" containerID="285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61" Dec 06 08:44:05 crc kubenswrapper[4763]: E1206 08:44:05.016736 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61\": container with ID starting with 285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61 not found: ID does not exist" containerID="285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.016781 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61"} err="failed to get container status \"285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61\": rpc error: code = NotFound desc = could not find container \"285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61\": container with ID starting with 285244dd4c094be82c39ba634f12cc4169ffb609e34803f3c40a26c94830ed61 not found: ID does not exist" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.016809 4763 scope.go:117] "RemoveContainer" containerID="e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2" Dec 06 08:44:05 crc kubenswrapper[4763]: E1206 08:44:05.017528 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2\": container with ID starting with e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2 not found: ID does not exist" containerID="e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.017555 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2"} err="failed to get container status \"e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2\": rpc error: code = NotFound desc = could not find container \"e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2\": container with ID starting with e080a0f84d051b47589c7122cc8f75ab18427fce52a69a4a7370a08b29e3bfa2 not found: ID does not exist" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.017575 4763 scope.go:117] "RemoveContainer" containerID="94c8aa104947827de4b3bd157ceb8e010420269f6fb6ead4d4145a534e442187" Dec 06 08:44:05 crc kubenswrapper[4763]: E1206 08:44:05.017873 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94c8aa104947827de4b3bd157ceb8e010420269f6fb6ead4d4145a534e442187\": container with ID starting with 94c8aa104947827de4b3bd157ceb8e010420269f6fb6ead4d4145a534e442187 not found: ID does not exist" containerID="94c8aa104947827de4b3bd157ceb8e010420269f6fb6ead4d4145a534e442187" 
Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.017930 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94c8aa104947827de4b3bd157ceb8e010420269f6fb6ead4d4145a534e442187"} err="failed to get container status \"94c8aa104947827de4b3bd157ceb8e010420269f6fb6ead4d4145a534e442187\": rpc error: code = NotFound desc = could not find container \"94c8aa104947827de4b3bd157ceb8e010420269f6fb6ead4d4145a534e442187\": container with ID starting with 94c8aa104947827de4b3bd157ceb8e010420269f6fb6ead4d4145a534e442187 not found: ID does not exist" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.025881 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbv4s\" (UniqueName: \"kubernetes.io/projected/0b710eec-9340-4cac-a773-0bb812c4733d-kube-api-access-dbv4s\") pod \"0b710eec-9340-4cac-a773-0bb812c4733d\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.027796 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-utilities\") pod \"0b710eec-9340-4cac-a773-0bb812c4733d\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.027976 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-catalog-content\") pod \"0b710eec-9340-4cac-a773-0bb812c4733d\" (UID: \"0b710eec-9340-4cac-a773-0bb812c4733d\") " Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.033308 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-utilities" (OuterVolumeSpecName: "utilities") pod "0b710eec-9340-4cac-a773-0bb812c4733d" (UID: "0b710eec-9340-4cac-a773-0bb812c4733d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.034466 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b710eec-9340-4cac-a773-0bb812c4733d-kube-api-access-dbv4s" (OuterVolumeSpecName: "kube-api-access-dbv4s") pod "0b710eec-9340-4cac-a773-0bb812c4733d" (UID: "0b710eec-9340-4cac-a773-0bb812c4733d"). InnerVolumeSpecName "kube-api-access-dbv4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.130562 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.130616 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbv4s\" (UniqueName: \"kubernetes.io/projected/0b710eec-9340-4cac-a773-0bb812c4733d-kube-api-access-dbv4s\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.138181 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0b710eec-9340-4cac-a773-0bb812c4733d" (UID: "0b710eec-9340-4cac-a773-0bb812c4733d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.232016 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b710eec-9340-4cac-a773-0bb812c4733d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.253922 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fvprt"] Dec 06 08:44:05 crc kubenswrapper[4763]: I1206 08:44:05.266387 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fvprt"] Dec 06 08:44:06 crc kubenswrapper[4763]: I1206 08:44:06.087969 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b710eec-9340-4cac-a773-0bb812c4733d" path="/var/lib/kubelet/pods/0b710eec-9340-4cac-a773-0bb812c4733d/volumes" Dec 06 08:44:10 crc kubenswrapper[4763]: I1206 08:44:10.098188 4763 generic.go:334] "Generic (PLEG): container finished" podID="aed517d7-adb8-4335-8184-6c55f27dd3b8" containerID="78d14d771d32073afd67b371893fd2cd8eb0f83d2f98c583ec40917f7739b87b" exitCode=0 Dec 06 08:44:10 crc kubenswrapper[4763]: I1206 08:44:10.098282 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" event={"ID":"aed517d7-adb8-4335-8184-6c55f27dd3b8","Type":"ContainerDied","Data":"78d14d771d32073afd67b371893fd2cd8eb0f83d2f98c583ec40917f7739b87b"} Dec 06 08:44:11 crc kubenswrapper[4763]: I1206 08:44:11.474429 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:44:11 crc kubenswrapper[4763]: I1206 08:44:11.652752 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xlf5t\" (UniqueName: \"kubernetes.io/projected/aed517d7-adb8-4335-8184-6c55f27dd3b8-kube-api-access-xlf5t\") pod \"aed517d7-adb8-4335-8184-6c55f27dd3b8\" (UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " Dec 06 08:44:11 crc kubenswrapper[4763]: I1206 08:44:11.652984 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-inventory\") pod \"aed517d7-adb8-4335-8184-6c55f27dd3b8\" (UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " Dec 06 08:44:11 crc kubenswrapper[4763]: I1206 08:44:11.653105 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-ssh-key\") pod \"aed517d7-adb8-4335-8184-6c55f27dd3b8\" (UID: \"aed517d7-adb8-4335-8184-6c55f27dd3b8\") " Dec 06 08:44:11 crc kubenswrapper[4763]: I1206 08:44:11.658497 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aed517d7-adb8-4335-8184-6c55f27dd3b8-kube-api-access-xlf5t" (OuterVolumeSpecName: "kube-api-access-xlf5t") pod "aed517d7-adb8-4335-8184-6c55f27dd3b8" (UID: "aed517d7-adb8-4335-8184-6c55f27dd3b8"). InnerVolumeSpecName "kube-api-access-xlf5t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:44:11 crc kubenswrapper[4763]: I1206 08:44:11.680569 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-inventory" (OuterVolumeSpecName: "inventory") pod "aed517d7-adb8-4335-8184-6c55f27dd3b8" (UID: "aed517d7-adb8-4335-8184-6c55f27dd3b8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:44:11 crc kubenswrapper[4763]: I1206 08:44:11.685279 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aed517d7-adb8-4335-8184-6c55f27dd3b8" (UID: "aed517d7-adb8-4335-8184-6c55f27dd3b8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:44:11 crc kubenswrapper[4763]: I1206 08:44:11.755317 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:11 crc kubenswrapper[4763]: I1206 08:44:11.755351 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xlf5t\" (UniqueName: \"kubernetes.io/projected/aed517d7-adb8-4335-8184-6c55f27dd3b8-kube-api-access-xlf5t\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:11 crc kubenswrapper[4763]: I1206 08:44:11.755365 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aed517d7-adb8-4335-8184-6c55f27dd3b8-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.113954 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" event={"ID":"aed517d7-adb8-4335-8184-6c55f27dd3b8","Type":"ContainerDied","Data":"1d45acb2879287f3b85aceb23377995e5025d7588ca489b500aa4a20df1fcb46"} Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.113990 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d45acb2879287f3b85aceb23377995e5025d7588ca489b500aa4a20df1fcb46" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.114006 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.195717 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-pkzvh"] Dec 06 08:44:12 crc kubenswrapper[4763]: E1206 08:44:12.196102 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aed517d7-adb8-4335-8184-6c55f27dd3b8" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.196121 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="aed517d7-adb8-4335-8184-6c55f27dd3b8" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:44:12 crc kubenswrapper[4763]: E1206 08:44:12.196134 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b710eec-9340-4cac-a773-0bb812c4733d" containerName="extract-utilities" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.196141 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b710eec-9340-4cac-a773-0bb812c4733d" containerName="extract-utilities" Dec 06 08:44:12 crc kubenswrapper[4763]: E1206 08:44:12.196169 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b710eec-9340-4cac-a773-0bb812c4733d" containerName="registry-server" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.196176 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b710eec-9340-4cac-a773-0bb812c4733d" containerName="registry-server" Dec 06 08:44:12 crc kubenswrapper[4763]: E1206 08:44:12.196189 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b710eec-9340-4cac-a773-0bb812c4733d" containerName="extract-content" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.196195 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b710eec-9340-4cac-a773-0bb812c4733d" containerName="extract-content" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.196373 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b710eec-9340-4cac-a773-0bb812c4733d" containerName="registry-server" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.196390 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="aed517d7-adb8-4335-8184-6c55f27dd3b8" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.197081 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.199083 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.199296 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.199408 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.200455 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.209635 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-pkzvh"] Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.365042 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-pkzvh\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.365380 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x667q\" (UniqueName: \"kubernetes.io/projected/67504944-9db6-4422-937a-70be47b9a514-kube-api-access-x667q\") pod \"ssh-known-hosts-edpm-deployment-pkzvh\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.365464 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-pkzvh\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.468026 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-pkzvh\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.468147 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-pkzvh\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.468229 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x667q\" (UniqueName: \"kubernetes.io/projected/67504944-9db6-4422-937a-70be47b9a514-kube-api-access-x667q\") pod \"ssh-known-hosts-edpm-deployment-pkzvh\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:12 crc 
kubenswrapper[4763]: I1206 08:44:12.474634 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-pkzvh\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.475769 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-pkzvh\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.485057 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x667q\" (UniqueName: \"kubernetes.io/projected/67504944-9db6-4422-937a-70be47b9a514-kube-api-access-x667q\") pod \"ssh-known-hosts-edpm-deployment-pkzvh\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:12 crc kubenswrapper[4763]: I1206 08:44:12.524499 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:13 crc kubenswrapper[4763]: I1206 08:44:13.077818 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-pkzvh"] Dec 06 08:44:13 crc kubenswrapper[4763]: W1206 08:44:13.082976 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67504944_9db6_4422_937a_70be47b9a514.slice/crio-1abc78af25b962f1fc489a820f6e4fff6768fa0e4e21e89d4eaef35cf9a399fb WatchSource:0}: Error finding container 1abc78af25b962f1fc489a820f6e4fff6768fa0e4e21e89d4eaef35cf9a399fb: Status 404 returned error can't find the container with id 1abc78af25b962f1fc489a820f6e4fff6768fa0e4e21e89d4eaef35cf9a399fb Dec 06 08:44:13 crc kubenswrapper[4763]: I1206 08:44:13.124833 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" event={"ID":"67504944-9db6-4422-937a-70be47b9a514","Type":"ContainerStarted","Data":"1abc78af25b962f1fc489a820f6e4fff6768fa0e4e21e89d4eaef35cf9a399fb"} Dec 06 08:44:14 crc kubenswrapper[4763]: I1206 08:44:14.135066 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" event={"ID":"67504944-9db6-4422-937a-70be47b9a514","Type":"ContainerStarted","Data":"356f9650bb2ef33d58dc42a988d1a3410185e670d1b886f0b5ec17c4f0df8c16"} Dec 06 08:44:14 crc kubenswrapper[4763]: I1206 08:44:14.154437 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" podStartSLOduration=1.637503534 podStartE2EDuration="2.15441904s" podCreationTimestamp="2025-12-06 08:44:12 +0000 UTC" firstStartedPulling="2025-12-06 08:44:13.086557806 +0000 UTC m=+1935.662262854" lastFinishedPulling="2025-12-06 08:44:13.603473322 +0000 UTC m=+1936.179178360" observedRunningTime="2025-12-06 08:44:14.152981311 +0000 UTC m=+1936.728686359" watchObservedRunningTime="2025-12-06 08:44:14.15441904 +0000 UTC m=+1936.730124078" Dec 06 08:44:21 crc kubenswrapper[4763]: I1206 08:44:21.193644 4763 generic.go:334] "Generic (PLEG): container finished" 
podID="67504944-9db6-4422-937a-70be47b9a514" containerID="356f9650bb2ef33d58dc42a988d1a3410185e670d1b886f0b5ec17c4f0df8c16" exitCode=0 Dec 06 08:44:21 crc kubenswrapper[4763]: I1206 08:44:21.193736 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" event={"ID":"67504944-9db6-4422-937a-70be47b9a514","Type":"ContainerDied","Data":"356f9650bb2ef33d58dc42a988d1a3410185e670d1b886f0b5ec17c4f0df8c16"} Dec 06 08:44:22 crc kubenswrapper[4763]: I1206 08:44:22.709797 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:22 crc kubenswrapper[4763]: I1206 08:44:22.888445 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x667q\" (UniqueName: \"kubernetes.io/projected/67504944-9db6-4422-937a-70be47b9a514-kube-api-access-x667q\") pod \"67504944-9db6-4422-937a-70be47b9a514\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " Dec 06 08:44:22 crc kubenswrapper[4763]: I1206 08:44:22.888567 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-ssh-key-openstack-edpm-ipam\") pod \"67504944-9db6-4422-937a-70be47b9a514\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " Dec 06 08:44:22 crc kubenswrapper[4763]: I1206 08:44:22.888591 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-inventory-0\") pod \"67504944-9db6-4422-937a-70be47b9a514\" (UID: \"67504944-9db6-4422-937a-70be47b9a514\") " Dec 06 08:44:22 crc kubenswrapper[4763]: I1206 08:44:22.894283 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67504944-9db6-4422-937a-70be47b9a514-kube-api-access-x667q" (OuterVolumeSpecName: "kube-api-access-x667q") pod "67504944-9db6-4422-937a-70be47b9a514" (UID: "67504944-9db6-4422-937a-70be47b9a514"). InnerVolumeSpecName "kube-api-access-x667q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:44:22 crc kubenswrapper[4763]: I1206 08:44:22.919585 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "67504944-9db6-4422-937a-70be47b9a514" (UID: "67504944-9db6-4422-937a-70be47b9a514"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:44:22 crc kubenswrapper[4763]: I1206 08:44:22.933787 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "67504944-9db6-4422-937a-70be47b9a514" (UID: "67504944-9db6-4422-937a-70be47b9a514"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:44:22 crc kubenswrapper[4763]: I1206 08:44:22.990166 4763 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:22 crc kubenswrapper[4763]: I1206 08:44:22.990205 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x667q\" (UniqueName: \"kubernetes.io/projected/67504944-9db6-4422-937a-70be47b9a514-kube-api-access-x667q\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:22 crc kubenswrapper[4763]: I1206 08:44:22.990218 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/67504944-9db6-4422-937a-70be47b9a514-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.214296 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" event={"ID":"67504944-9db6-4422-937a-70be47b9a514","Type":"ContainerDied","Data":"1abc78af25b962f1fc489a820f6e4fff6768fa0e4e21e89d4eaef35cf9a399fb"} Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.214335 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1abc78af25b962f1fc489a820f6e4fff6768fa0e4e21e89d4eaef35cf9a399fb" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.214391 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-pkzvh" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.285639 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn"] Dec 06 08:44:23 crc kubenswrapper[4763]: E1206 08:44:23.286043 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67504944-9db6-4422-937a-70be47b9a514" containerName="ssh-known-hosts-edpm-deployment" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.286059 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="67504944-9db6-4422-937a-70be47b9a514" containerName="ssh-known-hosts-edpm-deployment" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.286230 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="67504944-9db6-4422-937a-70be47b9a514" containerName="ssh-known-hosts-edpm-deployment" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.286886 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.289315 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.289482 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.289831 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.289755 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.295744 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7v5wn\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.296087 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7v5wn\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.296157 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvsk7\" (UniqueName: \"kubernetes.io/projected/82b99854-bc09-408a-b477-30156ae38d45-kube-api-access-cvsk7\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7v5wn\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.304657 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn"] Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.397957 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvsk7\" (UniqueName: \"kubernetes.io/projected/82b99854-bc09-408a-b477-30156ae38d45-kube-api-access-cvsk7\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7v5wn\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.398068 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7v5wn\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.398131 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7v5wn\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.402130 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7v5wn\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.403662 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7v5wn\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.413041 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvsk7\" (UniqueName: \"kubernetes.io/projected/82b99854-bc09-408a-b477-30156ae38d45-kube-api-access-cvsk7\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7v5wn\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:23 crc kubenswrapper[4763]: I1206 08:44:23.602155 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:24 crc kubenswrapper[4763]: I1206 08:44:24.125503 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn"] Dec 06 08:44:24 crc kubenswrapper[4763]: I1206 08:44:24.226385 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" event={"ID":"82b99854-bc09-408a-b477-30156ae38d45","Type":"ContainerStarted","Data":"c8fc8804b482eb27f9aab5dd7be8a51ac7e53adab8560b0dadf56655e58c381b"} Dec 06 08:44:25 crc kubenswrapper[4763]: I1206 08:44:25.235575 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" event={"ID":"82b99854-bc09-408a-b477-30156ae38d45","Type":"ContainerStarted","Data":"b2b04cd706a1d0980385efd19780d1f917befcdc85fec9422b042199d3dbc0eb"} Dec 06 08:44:32 crc kubenswrapper[4763]: I1206 08:44:32.296331 4763 generic.go:334] "Generic (PLEG): container finished" podID="82b99854-bc09-408a-b477-30156ae38d45" containerID="b2b04cd706a1d0980385efd19780d1f917befcdc85fec9422b042199d3dbc0eb" exitCode=0 Dec 06 08:44:32 crc kubenswrapper[4763]: I1206 08:44:32.296428 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" event={"ID":"82b99854-bc09-408a-b477-30156ae38d45","Type":"ContainerDied","Data":"b2b04cd706a1d0980385efd19780d1f917befcdc85fec9422b042199d3dbc0eb"} Dec 06 08:44:33 crc kubenswrapper[4763]: I1206 08:44:33.723933 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:33 crc kubenswrapper[4763]: I1206 08:44:33.898240 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-ssh-key\") pod \"82b99854-bc09-408a-b477-30156ae38d45\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " Dec 06 08:44:33 crc kubenswrapper[4763]: I1206 08:44:33.898326 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvsk7\" (UniqueName: \"kubernetes.io/projected/82b99854-bc09-408a-b477-30156ae38d45-kube-api-access-cvsk7\") pod \"82b99854-bc09-408a-b477-30156ae38d45\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " Dec 06 08:44:33 crc kubenswrapper[4763]: I1206 08:44:33.898404 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-inventory\") pod \"82b99854-bc09-408a-b477-30156ae38d45\" (UID: \"82b99854-bc09-408a-b477-30156ae38d45\") " Dec 06 08:44:33 crc kubenswrapper[4763]: I1206 08:44:33.906554 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82b99854-bc09-408a-b477-30156ae38d45-kube-api-access-cvsk7" (OuterVolumeSpecName: "kube-api-access-cvsk7") pod "82b99854-bc09-408a-b477-30156ae38d45" (UID: "82b99854-bc09-408a-b477-30156ae38d45"). InnerVolumeSpecName "kube-api-access-cvsk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:44:33 crc kubenswrapper[4763]: I1206 08:44:33.930920 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-inventory" (OuterVolumeSpecName: "inventory") pod "82b99854-bc09-408a-b477-30156ae38d45" (UID: "82b99854-bc09-408a-b477-30156ae38d45"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:44:33 crc kubenswrapper[4763]: I1206 08:44:33.939191 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "82b99854-bc09-408a-b477-30156ae38d45" (UID: "82b99854-bc09-408a-b477-30156ae38d45"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.000491 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.000527 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvsk7\" (UniqueName: \"kubernetes.io/projected/82b99854-bc09-408a-b477-30156ae38d45-kube-api-access-cvsk7\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.000539 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82b99854-bc09-408a-b477-30156ae38d45-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.316331 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" event={"ID":"82b99854-bc09-408a-b477-30156ae38d45","Type":"ContainerDied","Data":"c8fc8804b482eb27f9aab5dd7be8a51ac7e53adab8560b0dadf56655e58c381b"} Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.316708 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8fc8804b482eb27f9aab5dd7be8a51ac7e53adab8560b0dadf56655e58c381b" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.316415 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7v5wn" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.386340 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl"] Dec 06 08:44:34 crc kubenswrapper[4763]: E1206 08:44:34.386893 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82b99854-bc09-408a-b477-30156ae38d45" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.386937 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="82b99854-bc09-408a-b477-30156ae38d45" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.387186 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="82b99854-bc09-408a-b477-30156ae38d45" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.388166 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.392599 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.392996 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.392997 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.393102 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.411294 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl"] Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.509813 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl\" (UID: \"fff914be-5b3a-4696-93b9-4d384009f6b6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.509851 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c9qs\" (UniqueName: \"kubernetes.io/projected/fff914be-5b3a-4696-93b9-4d384009f6b6-kube-api-access-7c9qs\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl\" (UID: \"fff914be-5b3a-4696-93b9-4d384009f6b6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.509937 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl\" (UID: \"fff914be-5b3a-4696-93b9-4d384009f6b6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.612512 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl\" (UID: \"fff914be-5b3a-4696-93b9-4d384009f6b6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.612571 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c9qs\" (UniqueName: \"kubernetes.io/projected/fff914be-5b3a-4696-93b9-4d384009f6b6-kube-api-access-7c9qs\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl\" (UID: \"fff914be-5b3a-4696-93b9-4d384009f6b6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.612710 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl\" (UID: 
\"fff914be-5b3a-4696-93b9-4d384009f6b6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.617203 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl\" (UID: \"fff914be-5b3a-4696-93b9-4d384009f6b6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.624408 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl\" (UID: \"fff914be-5b3a-4696-93b9-4d384009f6b6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.635687 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c9qs\" (UniqueName: \"kubernetes.io/projected/fff914be-5b3a-4696-93b9-4d384009f6b6-kube-api-access-7c9qs\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl\" (UID: \"fff914be-5b3a-4696-93b9-4d384009f6b6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:34 crc kubenswrapper[4763]: I1206 08:44:34.713324 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:35 crc kubenswrapper[4763]: I1206 08:44:35.231537 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl"] Dec 06 08:44:35 crc kubenswrapper[4763]: I1206 08:44:35.325818 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" event={"ID":"fff914be-5b3a-4696-93b9-4d384009f6b6","Type":"ContainerStarted","Data":"d56682009110135b7ed157f6ce08e64952a1aeb7406ec325e542a243a9f481d0"} Dec 06 08:44:36 crc kubenswrapper[4763]: I1206 08:44:36.336125 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" event={"ID":"fff914be-5b3a-4696-93b9-4d384009f6b6","Type":"ContainerStarted","Data":"eba673d9d637cd1f100b1176ffab1dd5c5f1e8de9ff88831aa5d4337fa21b338"} Dec 06 08:44:36 crc kubenswrapper[4763]: I1206 08:44:36.366134 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" podStartSLOduration=1.963209481 podStartE2EDuration="2.366095263s" podCreationTimestamp="2025-12-06 08:44:34 +0000 UTC" firstStartedPulling="2025-12-06 08:44:35.229965461 +0000 UTC m=+1957.805670499" lastFinishedPulling="2025-12-06 08:44:35.632851243 +0000 UTC m=+1958.208556281" observedRunningTime="2025-12-06 08:44:36.355485436 +0000 UTC m=+1958.931190484" watchObservedRunningTime="2025-12-06 08:44:36.366095263 +0000 UTC m=+1958.941800341" Dec 06 08:44:45 crc kubenswrapper[4763]: I1206 08:44:45.409982 4763 generic.go:334] "Generic (PLEG): container finished" podID="fff914be-5b3a-4696-93b9-4d384009f6b6" containerID="eba673d9d637cd1f100b1176ffab1dd5c5f1e8de9ff88831aa5d4337fa21b338" exitCode=0 Dec 06 08:44:45 crc kubenswrapper[4763]: I1206 08:44:45.410099 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" 
event={"ID":"fff914be-5b3a-4696-93b9-4d384009f6b6","Type":"ContainerDied","Data":"eba673d9d637cd1f100b1176ffab1dd5c5f1e8de9ff88831aa5d4337fa21b338"} Dec 06 08:44:46 crc kubenswrapper[4763]: I1206 08:44:46.823816 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:46 crc kubenswrapper[4763]: I1206 08:44:46.960736 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c9qs\" (UniqueName: \"kubernetes.io/projected/fff914be-5b3a-4696-93b9-4d384009f6b6-kube-api-access-7c9qs\") pod \"fff914be-5b3a-4696-93b9-4d384009f6b6\" (UID: \"fff914be-5b3a-4696-93b9-4d384009f6b6\") " Dec 06 08:44:46 crc kubenswrapper[4763]: I1206 08:44:46.960886 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-ssh-key\") pod \"fff914be-5b3a-4696-93b9-4d384009f6b6\" (UID: \"fff914be-5b3a-4696-93b9-4d384009f6b6\") " Dec 06 08:44:46 crc kubenswrapper[4763]: I1206 08:44:46.960932 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-inventory\") pod \"fff914be-5b3a-4696-93b9-4d384009f6b6\" (UID: \"fff914be-5b3a-4696-93b9-4d384009f6b6\") " Dec 06 08:44:46 crc kubenswrapper[4763]: I1206 08:44:46.967349 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fff914be-5b3a-4696-93b9-4d384009f6b6-kube-api-access-7c9qs" (OuterVolumeSpecName: "kube-api-access-7c9qs") pod "fff914be-5b3a-4696-93b9-4d384009f6b6" (UID: "fff914be-5b3a-4696-93b9-4d384009f6b6"). InnerVolumeSpecName "kube-api-access-7c9qs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:44:46 crc kubenswrapper[4763]: I1206 08:44:46.989766 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-inventory" (OuterVolumeSpecName: "inventory") pod "fff914be-5b3a-4696-93b9-4d384009f6b6" (UID: "fff914be-5b3a-4696-93b9-4d384009f6b6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:44:46 crc kubenswrapper[4763]: I1206 08:44:46.995604 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fff914be-5b3a-4696-93b9-4d384009f6b6" (UID: "fff914be-5b3a-4696-93b9-4d384009f6b6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.065112 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.065224 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fff914be-5b3a-4696-93b9-4d384009f6b6-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.065257 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c9qs\" (UniqueName: \"kubernetes.io/projected/fff914be-5b3a-4696-93b9-4d384009f6b6-kube-api-access-7c9qs\") on node \"crc\" DevicePath \"\"" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.429665 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" event={"ID":"fff914be-5b3a-4696-93b9-4d384009f6b6","Type":"ContainerDied","Data":"d56682009110135b7ed157f6ce08e64952a1aeb7406ec325e542a243a9f481d0"} Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.429715 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d56682009110135b7ed157f6ce08e64952a1aeb7406ec325e542a243a9f481d0" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.430123 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.552639 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67"] Dec 06 08:44:47 crc kubenswrapper[4763]: E1206 08:44:47.553216 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fff914be-5b3a-4696-93b9-4d384009f6b6" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.553240 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="fff914be-5b3a-4696-93b9-4d384009f6b6" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.553621 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="fff914be-5b3a-4696-93b9-4d384009f6b6" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.554936 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.557111 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.557339 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.557825 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.558072 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.558242 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.558968 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.559323 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.569338 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.586948 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67"] Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.683821 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.683874 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.685124 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd9pf\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-kube-api-access-wd9pf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.685257 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-telemetry-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.685308 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.685385 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.685470 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.685552 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.685618 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.685699 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.685756 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: 
\"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.685778 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.686086 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.686226 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.789402 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.789651 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.789715 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.789739 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.789792 4763 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-wd9pf\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-kube-api-access-wd9pf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.789879 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.789932 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.789964 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.790017 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.790055 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.790099 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.790138 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-libvirt-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.790202 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.790227 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.795489 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.796507 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.796730 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.797017 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.798694 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.799063 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.799794 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.800509 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.800594 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.800850 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.801272 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.804275 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.809078 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.822083 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd9pf\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-kube-api-access-wd9pf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8dk67\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:47 crc kubenswrapper[4763]: I1206 08:44:47.872669 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:44:48 crc kubenswrapper[4763]: I1206 08:44:48.386583 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67"] Dec 06 08:44:48 crc kubenswrapper[4763]: I1206 08:44:48.440892 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" event={"ID":"1fd29f51-61ee-4d62-a135-11d9fbc73a73","Type":"ContainerStarted","Data":"45ff743bd2b7ac13087614280ff8046cd96e37ff41aa4ea6c5349ec1366216d4"} Dec 06 08:44:49 crc kubenswrapper[4763]: I1206 08:44:49.451007 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" event={"ID":"1fd29f51-61ee-4d62-a135-11d9fbc73a73","Type":"ContainerStarted","Data":"59000bcd00cb411f13caf37a1fd83e41316873c69602cdae077b30451836551b"} Dec 06 08:44:49 crc kubenswrapper[4763]: I1206 08:44:49.476048 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" podStartSLOduration=2.075535724 podStartE2EDuration="2.476027171s" podCreationTimestamp="2025-12-06 08:44:47 +0000 UTC" firstStartedPulling="2025-12-06 08:44:48.394783355 +0000 UTC m=+1970.970488393" lastFinishedPulling="2025-12-06 08:44:48.795274802 +0000 UTC m=+1971.370979840" observedRunningTime="2025-12-06 08:44:49.46933071 +0000 UTC m=+1972.045035758" watchObservedRunningTime="2025-12-06 08:44:49.476027171 +0000 UTC m=+1972.051732229" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.150533 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8"] Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.152683 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.154611 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.155042 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.162544 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8"] Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.327361 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdlzc\" (UniqueName: \"kubernetes.io/projected/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-kube-api-access-xdlzc\") pod \"collect-profiles-29416845-l4lg8\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.327465 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-config-volume\") pod \"collect-profiles-29416845-l4lg8\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.327501 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-secret-volume\") pod \"collect-profiles-29416845-l4lg8\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.429213 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-config-volume\") pod \"collect-profiles-29416845-l4lg8\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.429556 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-secret-volume\") pod \"collect-profiles-29416845-l4lg8\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.429786 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdlzc\" (UniqueName: \"kubernetes.io/projected/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-kube-api-access-xdlzc\") pod \"collect-profiles-29416845-l4lg8\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.430383 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-config-volume\") pod 
\"collect-profiles-29416845-l4lg8\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.435766 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-secret-volume\") pod \"collect-profiles-29416845-l4lg8\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.449015 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdlzc\" (UniqueName: \"kubernetes.io/projected/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-kube-api-access-xdlzc\") pod \"collect-profiles-29416845-l4lg8\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.472132 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:00 crc kubenswrapper[4763]: I1206 08:45:00.917722 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8"] Dec 06 08:45:01 crc kubenswrapper[4763]: I1206 08:45:01.552243 4763 generic.go:334] "Generic (PLEG): container finished" podID="1696c5a6-4a1e-454c-bc65-99f6b7cfbe67" containerID="7aa366474eb624c4022bb1569368528ec6b1e6225ab12080e3bb2a24fe495d50" exitCode=0 Dec 06 08:45:01 crc kubenswrapper[4763]: I1206 08:45:01.552321 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" event={"ID":"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67","Type":"ContainerDied","Data":"7aa366474eb624c4022bb1569368528ec6b1e6225ab12080e3bb2a24fe495d50"} Dec 06 08:45:01 crc kubenswrapper[4763]: I1206 08:45:01.552555 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" event={"ID":"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67","Type":"ContainerStarted","Data":"2d8725953a24247717773de7288497b18b641e098915f6415847105563337068"} Dec 06 08:45:02 crc kubenswrapper[4763]: I1206 08:45:02.899595 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:02 crc kubenswrapper[4763]: I1206 08:45:02.982454 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-config-volume\") pod \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " Dec 06 08:45:02 crc kubenswrapper[4763]: I1206 08:45:02.982722 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-secret-volume\") pod \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " Dec 06 08:45:02 crc kubenswrapper[4763]: I1206 08:45:02.982819 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdlzc\" (UniqueName: \"kubernetes.io/projected/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-kube-api-access-xdlzc\") pod \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\" (UID: \"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67\") " Dec 06 08:45:02 crc kubenswrapper[4763]: I1206 08:45:02.983402 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-config-volume" (OuterVolumeSpecName: "config-volume") pod "1696c5a6-4a1e-454c-bc65-99f6b7cfbe67" (UID: "1696c5a6-4a1e-454c-bc65-99f6b7cfbe67"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:45:02 crc kubenswrapper[4763]: I1206 08:45:02.988722 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1696c5a6-4a1e-454c-bc65-99f6b7cfbe67" (UID: "1696c5a6-4a1e-454c-bc65-99f6b7cfbe67"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:45:02 crc kubenswrapper[4763]: I1206 08:45:02.988828 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-kube-api-access-xdlzc" (OuterVolumeSpecName: "kube-api-access-xdlzc") pod "1696c5a6-4a1e-454c-bc65-99f6b7cfbe67" (UID: "1696c5a6-4a1e-454c-bc65-99f6b7cfbe67"). InnerVolumeSpecName "kube-api-access-xdlzc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:45:03 crc kubenswrapper[4763]: I1206 08:45:03.085549 4763 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:03 crc kubenswrapper[4763]: I1206 08:45:03.085602 4763 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:03 crc kubenswrapper[4763]: I1206 08:45:03.085613 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdlzc\" (UniqueName: \"kubernetes.io/projected/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67-kube-api-access-xdlzc\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:03 crc kubenswrapper[4763]: I1206 08:45:03.571782 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" event={"ID":"1696c5a6-4a1e-454c-bc65-99f6b7cfbe67","Type":"ContainerDied","Data":"2d8725953a24247717773de7288497b18b641e098915f6415847105563337068"} Dec 06 08:45:03 crc kubenswrapper[4763]: I1206 08:45:03.571829 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d8725953a24247717773de7288497b18b641e098915f6415847105563337068" Dec 06 08:45:03 crc kubenswrapper[4763]: I1206 08:45:03.571840 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8" Dec 06 08:45:03 crc kubenswrapper[4763]: I1206 08:45:03.974878 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f"] Dec 06 08:45:03 crc kubenswrapper[4763]: I1206 08:45:03.983807 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416800-7c57f"] Dec 06 08:45:05 crc kubenswrapper[4763]: I1206 08:45:05.731740 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1cc0acf-7876-428e-8430-a14d2498a435" path="/var/lib/kubelet/pods/f1cc0acf-7876-428e-8430-a14d2498a435/volumes" Dec 06 08:45:23 crc kubenswrapper[4763]: I1206 08:45:23.663810 4763 scope.go:117] "RemoveContainer" containerID="e07c7d697b605fc72cff22475614c8ddc12c849c8fac575ff2d8d53b303a33d2" Dec 06 08:45:25 crc kubenswrapper[4763]: I1206 08:45:25.893202 4763 generic.go:334] "Generic (PLEG): container finished" podID="1fd29f51-61ee-4d62-a135-11d9fbc73a73" containerID="59000bcd00cb411f13caf37a1fd83e41316873c69602cdae077b30451836551b" exitCode=0 Dec 06 08:45:25 crc kubenswrapper[4763]: I1206 08:45:25.893307 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" event={"ID":"1fd29f51-61ee-4d62-a135-11d9fbc73a73","Type":"ContainerDied","Data":"59000bcd00cb411f13caf37a1fd83e41316873c69602cdae077b30451836551b"} Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.354477 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.459516 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.459639 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-nova-combined-ca-bundle\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.459716 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ssh-key\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.459747 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ovn-combined-ca-bundle\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.459824 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-inventory\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.459847 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-ovn-default-certs-0\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.459877 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wd9pf\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-kube-api-access-wd9pf\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.459944 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-bootstrap-combined-ca-bundle\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.459980 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-neutron-metadata-combined-ca-bundle\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.460003 4763 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.460037 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.460069 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-telemetry-combined-ca-bundle\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.460139 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-libvirt-combined-ca-bundle\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.460175 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-repo-setup-combined-ca-bundle\") pod \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\" (UID: \"1fd29f51-61ee-4d62-a135-11d9fbc73a73\") " Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.468091 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.468121 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.468730 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.468274 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.468856 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.468938 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.468944 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.470342 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.470788 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.470849 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.471471 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-kube-api-access-wd9pf" (OuterVolumeSpecName: "kube-api-access-wd9pf") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "kube-api-access-wd9pf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.472115 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.497710 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.499012 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-inventory" (OuterVolumeSpecName: "inventory") pod "1fd29f51-61ee-4d62-a135-11d9fbc73a73" (UID: "1fd29f51-61ee-4d62-a135-11d9fbc73a73"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.562882 4763 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.562956 4763 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.562969 4763 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.562981 4763 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.562991 4763 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.562999 4763 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.563007 4763 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.563019 4763 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.563029 4763 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.563039 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.563047 4763 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.563056 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fd29f51-61ee-4d62-a135-11d9fbc73a73-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.563064 4763 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.563075 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wd9pf\" (UniqueName: \"kubernetes.io/projected/1fd29f51-61ee-4d62-a135-11d9fbc73a73-kube-api-access-wd9pf\") on node \"crc\" DevicePath \"\"" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.913275 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" event={"ID":"1fd29f51-61ee-4d62-a135-11d9fbc73a73","Type":"ContainerDied","Data":"45ff743bd2b7ac13087614280ff8046cd96e37ff41aa4ea6c5349ec1366216d4"} Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.913592 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45ff743bd2b7ac13087614280ff8046cd96e37ff41aa4ea6c5349ec1366216d4" Dec 06 08:45:27 crc kubenswrapper[4763]: I1206 08:45:27.913349 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8dk67" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.172444 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59"] Dec 06 08:45:28 crc kubenswrapper[4763]: E1206 08:45:28.173393 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1696c5a6-4a1e-454c-bc65-99f6b7cfbe67" containerName="collect-profiles" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.173417 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="1696c5a6-4a1e-454c-bc65-99f6b7cfbe67" containerName="collect-profiles" Dec 06 08:45:28 crc kubenswrapper[4763]: E1206 08:45:28.173473 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd29f51-61ee-4d62-a135-11d9fbc73a73" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.173483 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd29f51-61ee-4d62-a135-11d9fbc73a73" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.174003 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fd29f51-61ee-4d62-a135-11d9fbc73a73" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.174037 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="1696c5a6-4a1e-454c-bc65-99f6b7cfbe67" containerName="collect-profiles" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.185234 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.241223 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.241516 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.241651 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.243663 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.263248 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.293874 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.294040 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgn6m\" (UniqueName: \"kubernetes.io/projected/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-kube-api-access-mgn6m\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" 
Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.294063 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.294222 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.294244 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.307770 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59"] Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.396582 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgn6m\" (UniqueName: \"kubernetes.io/projected/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-kube-api-access-mgn6m\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.396633 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.396738 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.396760 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.396822 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: 
\"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.398017 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.402082 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.408126 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.411800 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.415089 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgn6m\" (UniqueName: \"kubernetes.io/projected/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-kube-api-access-mgn6m\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t5z59\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:28 crc kubenswrapper[4763]: I1206 08:45:28.628017 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:45:29 crc kubenswrapper[4763]: I1206 08:45:29.252065 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59"] Dec 06 08:45:29 crc kubenswrapper[4763]: W1206 08:45:29.256770 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4b3e774_f3e9_44a0_84aa_730f5a6ae8ee.slice/crio-8a0923709dd25b5e70a47bc729602f85ece26f6bbac8895196d8c33054bd2c29 WatchSource:0}: Error finding container 8a0923709dd25b5e70a47bc729602f85ece26f6bbac8895196d8c33054bd2c29: Status 404 returned error can't find the container with id 8a0923709dd25b5e70a47bc729602f85ece26f6bbac8895196d8c33054bd2c29 Dec 06 08:45:29 crc kubenswrapper[4763]: I1206 08:45:29.934971 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" event={"ID":"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee","Type":"ContainerStarted","Data":"8a0923709dd25b5e70a47bc729602f85ece26f6bbac8895196d8c33054bd2c29"} Dec 06 08:45:30 crc kubenswrapper[4763]: I1206 08:45:30.947389 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" event={"ID":"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee","Type":"ContainerStarted","Data":"1bdf9d5160e171ec88b1bf96a3cddde1f7006a6f763f45eee70879bf8c1144d7"} Dec 06 08:45:30 crc kubenswrapper[4763]: I1206 08:45:30.967641 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" podStartSLOduration=2.5049463210000003 podStartE2EDuration="2.96762261s" podCreationTimestamp="2025-12-06 08:45:28 +0000 UTC" firstStartedPulling="2025-12-06 08:45:29.259985825 +0000 UTC m=+2011.835690863" lastFinishedPulling="2025-12-06 08:45:29.722662114 +0000 UTC m=+2012.298367152" observedRunningTime="2025-12-06 08:45:30.966187381 +0000 UTC m=+2013.541892439" watchObservedRunningTime="2025-12-06 08:45:30.96762261 +0000 UTC m=+2013.543327648" Dec 06 08:45:42 crc kubenswrapper[4763]: I1206 08:45:42.538563 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:45:42 crc kubenswrapper[4763]: I1206 08:45:42.539155 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:46:12 crc kubenswrapper[4763]: I1206 08:46:12.537290 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:46:12 crc kubenswrapper[4763]: I1206 08:46:12.537950 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:46:33 crc kubenswrapper[4763]: I1206 08:46:33.504336 4763 generic.go:334] "Generic (PLEG): container finished" podID="a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee" containerID="1bdf9d5160e171ec88b1bf96a3cddde1f7006a6f763f45eee70879bf8c1144d7" exitCode=0 Dec 06 08:46:33 crc kubenswrapper[4763]: I1206 08:46:33.504455 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" event={"ID":"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee","Type":"ContainerDied","Data":"1bdf9d5160e171ec88b1bf96a3cddde1f7006a6f763f45eee70879bf8c1144d7"} Dec 06 08:46:34 crc kubenswrapper[4763]: I1206 08:46:34.980020 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.114421 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ssh-key\") pod \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.114558 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovncontroller-config-0\") pod \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.114584 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-inventory\") pod \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.114645 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovn-combined-ca-bundle\") pod \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.114745 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgn6m\" (UniqueName: \"kubernetes.io/projected/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-kube-api-access-mgn6m\") pod \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\" (UID: \"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee\") " Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.121454 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-kube-api-access-mgn6m" (OuterVolumeSpecName: "kube-api-access-mgn6m") pod "a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee" (UID: "a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee"). InnerVolumeSpecName "kube-api-access-mgn6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.122164 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee" (UID: "a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.146743 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee" (UID: "a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.151316 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-inventory" (OuterVolumeSpecName: "inventory") pod "a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee" (UID: "a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.155440 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee" (UID: "a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.217497 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgn6m\" (UniqueName: \"kubernetes.io/projected/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-kube-api-access-mgn6m\") on node \"crc\" DevicePath \"\"" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.217545 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.217559 4763 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.217573 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.217585 4763 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.524609 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" event={"ID":"a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee","Type":"ContainerDied","Data":"8a0923709dd25b5e70a47bc729602f85ece26f6bbac8895196d8c33054bd2c29"} Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.524647 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a0923709dd25b5e70a47bc729602f85ece26f6bbac8895196d8c33054bd2c29" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.524658 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t5z59" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.624210 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv"] Dec 06 08:46:35 crc kubenswrapper[4763]: E1206 08:46:35.624635 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.624653 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.624858 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.625606 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.627886 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.628422 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.628528 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.628881 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.630446 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.634848 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.636862 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv"] Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.726079 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.726129 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.726286 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.726403 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqpts\" (UniqueName: \"kubernetes.io/projected/fc43b8ce-1630-43f7-975e-fde4062cfc62-kube-api-access-pqpts\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.726448 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.726498 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.828549 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.828651 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.828779 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.828852 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqpts\" (UniqueName: \"kubernetes.io/projected/fc43b8ce-1630-43f7-975e-fde4062cfc62-kube-api-access-pqpts\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: 
\"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.828923 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.829008 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.833947 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.834236 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.834515 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.838450 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.839672 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.846766 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqpts\" (UniqueName: 
\"kubernetes.io/projected/fc43b8ce-1630-43f7-975e-fde4062cfc62-kube-api-access-pqpts\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:35 crc kubenswrapper[4763]: I1206 08:46:35.951157 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:46:36 crc kubenswrapper[4763]: I1206 08:46:36.480644 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv"] Dec 06 08:46:36 crc kubenswrapper[4763]: I1206 08:46:36.534441 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" event={"ID":"fc43b8ce-1630-43f7-975e-fde4062cfc62","Type":"ContainerStarted","Data":"f89afdac54fa8217223bd2a5c33801bc5e918a1e0e3e336ed37043c6207e8ec9"} Dec 06 08:46:37 crc kubenswrapper[4763]: I1206 08:46:37.543433 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" event={"ID":"fc43b8ce-1630-43f7-975e-fde4062cfc62","Type":"ContainerStarted","Data":"77967fe0d6eed6bf8b362a27e61e8c243f29a8e35c564bf5aa172981ff26baaf"} Dec 06 08:46:37 crc kubenswrapper[4763]: I1206 08:46:37.565037 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" podStartSLOduration=2.08937057 podStartE2EDuration="2.565014621s" podCreationTimestamp="2025-12-06 08:46:35 +0000 UTC" firstStartedPulling="2025-12-06 08:46:36.48953168 +0000 UTC m=+2079.065236718" lastFinishedPulling="2025-12-06 08:46:36.965175731 +0000 UTC m=+2079.540880769" observedRunningTime="2025-12-06 08:46:37.558889945 +0000 UTC m=+2080.134594993" watchObservedRunningTime="2025-12-06 08:46:37.565014621 +0000 UTC m=+2080.140719669" Dec 06 08:46:42 crc kubenswrapper[4763]: I1206 08:46:42.537578 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:46:42 crc kubenswrapper[4763]: I1206 08:46:42.538149 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:46:42 crc kubenswrapper[4763]: I1206 08:46:42.538192 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:46:42 crc kubenswrapper[4763]: I1206 08:46:42.539074 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ae1be8b2d0ea850b8e3985a086ff6ca03663fa450062cd2440c8c4dd4d72b10e"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 08:46:42 crc kubenswrapper[4763]: I1206 08:46:42.539131 4763 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://ae1be8b2d0ea850b8e3985a086ff6ca03663fa450062cd2440c8c4dd4d72b10e" gracePeriod=600 Dec 06 08:46:43 crc kubenswrapper[4763]: I1206 08:46:43.598417 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="ae1be8b2d0ea850b8e3985a086ff6ca03663fa450062cd2440c8c4dd4d72b10e" exitCode=0 Dec 06 08:46:43 crc kubenswrapper[4763]: I1206 08:46:43.598497 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"ae1be8b2d0ea850b8e3985a086ff6ca03663fa450062cd2440c8c4dd4d72b10e"} Dec 06 08:46:43 crc kubenswrapper[4763]: I1206 08:46:43.598993 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6"} Dec 06 08:46:43 crc kubenswrapper[4763]: I1206 08:46:43.599022 4763 scope.go:117] "RemoveContainer" containerID="c3f50c4b3eb2fb5ac927f22e5a6304d1ea962aaebd3737a80db79ea46b7c4206" Dec 06 08:47:25 crc kubenswrapper[4763]: I1206 08:47:25.990970 4763 generic.go:334] "Generic (PLEG): container finished" podID="fc43b8ce-1630-43f7-975e-fde4062cfc62" containerID="77967fe0d6eed6bf8b362a27e61e8c243f29a8e35c564bf5aa172981ff26baaf" exitCode=0 Dec 06 08:47:25 crc kubenswrapper[4763]: I1206 08:47:25.991074 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" event={"ID":"fc43b8ce-1630-43f7-975e-fde4062cfc62","Type":"ContainerDied","Data":"77967fe0d6eed6bf8b362a27e61e8c243f29a8e35c564bf5aa172981ff26baaf"} Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.420296 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.477211 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-nova-metadata-neutron-config-0\") pod \"fc43b8ce-1630-43f7-975e-fde4062cfc62\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.477292 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-metadata-combined-ca-bundle\") pod \"fc43b8ce-1630-43f7-975e-fde4062cfc62\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.477380 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-inventory\") pod \"fc43b8ce-1630-43f7-975e-fde4062cfc62\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.477482 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-ssh-key\") pod \"fc43b8ce-1630-43f7-975e-fde4062cfc62\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.477543 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-ovn-metadata-agent-neutron-config-0\") pod \"fc43b8ce-1630-43f7-975e-fde4062cfc62\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.477708 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqpts\" (UniqueName: \"kubernetes.io/projected/fc43b8ce-1630-43f7-975e-fde4062cfc62-kube-api-access-pqpts\") pod \"fc43b8ce-1630-43f7-975e-fde4062cfc62\" (UID: \"fc43b8ce-1630-43f7-975e-fde4062cfc62\") " Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.484278 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc43b8ce-1630-43f7-975e-fde4062cfc62-kube-api-access-pqpts" (OuterVolumeSpecName: "kube-api-access-pqpts") pod "fc43b8ce-1630-43f7-975e-fde4062cfc62" (UID: "fc43b8ce-1630-43f7-975e-fde4062cfc62"). InnerVolumeSpecName "kube-api-access-pqpts". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.485854 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "fc43b8ce-1630-43f7-975e-fde4062cfc62" (UID: "fc43b8ce-1630-43f7-975e-fde4062cfc62"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.508820 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "fc43b8ce-1630-43f7-975e-fde4062cfc62" (UID: "fc43b8ce-1630-43f7-975e-fde4062cfc62"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.512949 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fc43b8ce-1630-43f7-975e-fde4062cfc62" (UID: "fc43b8ce-1630-43f7-975e-fde4062cfc62"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.525233 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "fc43b8ce-1630-43f7-975e-fde4062cfc62" (UID: "fc43b8ce-1630-43f7-975e-fde4062cfc62"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.531340 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-inventory" (OuterVolumeSpecName: "inventory") pod "fc43b8ce-1630-43f7-975e-fde4062cfc62" (UID: "fc43b8ce-1630-43f7-975e-fde4062cfc62"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.580279 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqpts\" (UniqueName: \"kubernetes.io/projected/fc43b8ce-1630-43f7-975e-fde4062cfc62-kube-api-access-pqpts\") on node \"crc\" DevicePath \"\"" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.580315 4763 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.580325 4763 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.580336 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.580346 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:47:27 crc kubenswrapper[4763]: I1206 08:47:27.580354 4763 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fc43b8ce-1630-43f7-975e-fde4062cfc62-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.011677 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" event={"ID":"fc43b8ce-1630-43f7-975e-fde4062cfc62","Type":"ContainerDied","Data":"f89afdac54fa8217223bd2a5c33801bc5e918a1e0e3e336ed37043c6207e8ec9"} Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.011722 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f89afdac54fa8217223bd2a5c33801bc5e918a1e0e3e336ed37043c6207e8ec9" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.011729 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.100305 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425"] Dec 06 08:47:28 crc kubenswrapper[4763]: E1206 08:47:28.101879 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc43b8ce-1630-43f7-975e-fde4062cfc62" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.102881 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc43b8ce-1630-43f7-975e-fde4062cfc62" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.103296 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc43b8ce-1630-43f7-975e-fde4062cfc62" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.104418 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.107474 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.107476 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.107595 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.107486 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.108629 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.113635 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425"] Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.191027 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.191116 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.191160 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.191199 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wkc4\" (UniqueName: \"kubernetes.io/projected/48ac6869-b493-4288-9837-9acc1cdc9a90-kube-api-access-5wkc4\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.191241 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.294536 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.294651 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.294711 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.294776 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wkc4\" (UniqueName: \"kubernetes.io/projected/48ac6869-b493-4288-9837-9acc1cdc9a90-kube-api-access-5wkc4\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.294856 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.298709 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.298752 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.300177 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.300670 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.319274 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wkc4\" (UniqueName: \"kubernetes.io/projected/48ac6869-b493-4288-9837-9acc1cdc9a90-kube-api-access-5wkc4\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nw425\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.421880 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.917672 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425"] Dec 06 08:47:28 crc kubenswrapper[4763]: I1206 08:47:28.923039 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 08:47:29 crc kubenswrapper[4763]: I1206 08:47:29.022849 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" event={"ID":"48ac6869-b493-4288-9837-9acc1cdc9a90","Type":"ContainerStarted","Data":"5e74432975cde6d819e90f196a085fa3eaac4cd371794e3ca6cb72e21e4d4157"} Dec 06 08:47:30 crc kubenswrapper[4763]: I1206 08:47:30.036684 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" event={"ID":"48ac6869-b493-4288-9837-9acc1cdc9a90","Type":"ContainerStarted","Data":"3c515f0432e9e80341a7cacb40f08e1feca573b1175aa0c45a0a86e87d49ad68"} Dec 06 08:47:30 crc kubenswrapper[4763]: I1206 08:47:30.060350 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" podStartSLOduration=1.661396414 podStartE2EDuration="2.060316008s" podCreationTimestamp="2025-12-06 08:47:28 +0000 UTC" firstStartedPulling="2025-12-06 08:47:28.922623935 +0000 UTC m=+2131.498328973" lastFinishedPulling="2025-12-06 08:47:29.321543529 +0000 UTC m=+2131.897248567" observedRunningTime="2025-12-06 08:47:30.050682997 +0000 UTC m=+2132.626388035" watchObservedRunningTime="2025-12-06 08:47:30.060316008 +0000 UTC m=+2132.636021076" Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.223261 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zl8qm"] Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.226026 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.257201 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zl8qm"] Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.316267 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqdqz\" (UniqueName: \"kubernetes.io/projected/55a3e925-8f05-482d-9551-b3c8a6c95eaa-kube-api-access-rqdqz\") pod \"certified-operators-zl8qm\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.316359 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-catalog-content\") pod \"certified-operators-zl8qm\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.316529 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-utilities\") pod \"certified-operators-zl8qm\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.418455 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-catalog-content\") pod \"certified-operators-zl8qm\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.418597 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-utilities\") pod \"certified-operators-zl8qm\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.418663 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqdqz\" (UniqueName: \"kubernetes.io/projected/55a3e925-8f05-482d-9551-b3c8a6c95eaa-kube-api-access-rqdqz\") pod \"certified-operators-zl8qm\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.419016 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-catalog-content\") pod \"certified-operators-zl8qm\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.419262 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-utilities\") pod \"certified-operators-zl8qm\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.439812 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rqdqz\" (UniqueName: \"kubernetes.io/projected/55a3e925-8f05-482d-9551-b3c8a6c95eaa-kube-api-access-rqdqz\") pod \"certified-operators-zl8qm\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:47:54 crc kubenswrapper[4763]: I1206 08:47:54.551085 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:47:55 crc kubenswrapper[4763]: I1206 08:47:55.034816 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zl8qm"] Dec 06 08:47:55 crc kubenswrapper[4763]: I1206 08:47:55.250463 4763 generic.go:334] "Generic (PLEG): container finished" podID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" containerID="ee14f47b30deefe8749a4af03d5ac601d262efe9caa0c9789a4e41b79b7a0c75" exitCode=0 Dec 06 08:47:55 crc kubenswrapper[4763]: I1206 08:47:55.250650 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zl8qm" event={"ID":"55a3e925-8f05-482d-9551-b3c8a6c95eaa","Type":"ContainerDied","Data":"ee14f47b30deefe8749a4af03d5ac601d262efe9caa0c9789a4e41b79b7a0c75"} Dec 06 08:47:55 crc kubenswrapper[4763]: I1206 08:47:55.250761 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zl8qm" event={"ID":"55a3e925-8f05-482d-9551-b3c8a6c95eaa","Type":"ContainerStarted","Data":"ff0af56d481dc527f1799bc78402c0724a6a3d7f807561204f935f6d510737e3"} Dec 06 08:47:56 crc kubenswrapper[4763]: I1206 08:47:56.260773 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zl8qm" event={"ID":"55a3e925-8f05-482d-9551-b3c8a6c95eaa","Type":"ContainerStarted","Data":"d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f"} Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.271487 4763 generic.go:334] "Generic (PLEG): container finished" podID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" containerID="d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f" exitCode=0 Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.271583 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zl8qm" event={"ID":"55a3e925-8f05-482d-9551-b3c8a6c95eaa","Type":"ContainerDied","Data":"d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f"} Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.613353 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-526kf"] Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.616374 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-526kf" Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.641269 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-526kf"] Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.789139 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xtnm\" (UniqueName: \"kubernetes.io/projected/0ac397e7-b40f-447c-9466-bd67b9778b1c-kube-api-access-7xtnm\") pod \"community-operators-526kf\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " pod="openshift-marketplace/community-operators-526kf" Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.789211 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-catalog-content\") pod \"community-operators-526kf\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " pod="openshift-marketplace/community-operators-526kf" Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.789358 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-utilities\") pod \"community-operators-526kf\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " pod="openshift-marketplace/community-operators-526kf" Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.891197 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-catalog-content\") pod \"community-operators-526kf\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " pod="openshift-marketplace/community-operators-526kf" Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.891653 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-utilities\") pod \"community-operators-526kf\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " pod="openshift-marketplace/community-operators-526kf" Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.891719 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xtnm\" (UniqueName: \"kubernetes.io/projected/0ac397e7-b40f-447c-9466-bd67b9778b1c-kube-api-access-7xtnm\") pod \"community-operators-526kf\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " pod="openshift-marketplace/community-operators-526kf" Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.891766 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-catalog-content\") pod \"community-operators-526kf\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " pod="openshift-marketplace/community-operators-526kf" Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.892315 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-utilities\") pod \"community-operators-526kf\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " pod="openshift-marketplace/community-operators-526kf" Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.919762 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7xtnm\" (UniqueName: \"kubernetes.io/projected/0ac397e7-b40f-447c-9466-bd67b9778b1c-kube-api-access-7xtnm\") pod \"community-operators-526kf\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " pod="openshift-marketplace/community-operators-526kf" Dec 06 08:47:57 crc kubenswrapper[4763]: I1206 08:47:57.955863 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-526kf" Dec 06 08:47:58 crc kubenswrapper[4763]: I1206 08:47:58.287292 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zl8qm" event={"ID":"55a3e925-8f05-482d-9551-b3c8a6c95eaa","Type":"ContainerStarted","Data":"1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a"} Dec 06 08:47:58 crc kubenswrapper[4763]: I1206 08:47:58.308024 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zl8qm" podStartSLOduration=1.8084513439999998 podStartE2EDuration="4.307998256s" podCreationTimestamp="2025-12-06 08:47:54 +0000 UTC" firstStartedPulling="2025-12-06 08:47:55.252612466 +0000 UTC m=+2157.828317504" lastFinishedPulling="2025-12-06 08:47:57.752159378 +0000 UTC m=+2160.327864416" observedRunningTime="2025-12-06 08:47:58.303483794 +0000 UTC m=+2160.879188842" watchObservedRunningTime="2025-12-06 08:47:58.307998256 +0000 UTC m=+2160.883703294" Dec 06 08:47:58 crc kubenswrapper[4763]: I1206 08:47:58.552265 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-526kf"] Dec 06 08:47:58 crc kubenswrapper[4763]: W1206 08:47:58.552365 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ac397e7_b40f_447c_9466_bd67b9778b1c.slice/crio-90f5aaab59673ee649556f2133dba0bf093c3e2b8e806dd98852bfd3fecf9dec WatchSource:0}: Error finding container 90f5aaab59673ee649556f2133dba0bf093c3e2b8e806dd98852bfd3fecf9dec: Status 404 returned error can't find the container with id 90f5aaab59673ee649556f2133dba0bf093c3e2b8e806dd98852bfd3fecf9dec Dec 06 08:47:59 crc kubenswrapper[4763]: I1206 08:47:59.296857 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-526kf" event={"ID":"0ac397e7-b40f-447c-9466-bd67b9778b1c","Type":"ContainerStarted","Data":"90f5aaab59673ee649556f2133dba0bf093c3e2b8e806dd98852bfd3fecf9dec"} Dec 06 08:48:00 crc kubenswrapper[4763]: I1206 08:48:00.306019 4763 generic.go:334] "Generic (PLEG): container finished" podID="0ac397e7-b40f-447c-9466-bd67b9778b1c" containerID="073a92346fd0074eb24ab019acb491d94f9ba3462a96436fcd427c5b65818643" exitCode=0 Dec 06 08:48:00 crc kubenswrapper[4763]: I1206 08:48:00.306131 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-526kf" event={"ID":"0ac397e7-b40f-447c-9466-bd67b9778b1c","Type":"ContainerDied","Data":"073a92346fd0074eb24ab019acb491d94f9ba3462a96436fcd427c5b65818643"} Dec 06 08:48:02 crc kubenswrapper[4763]: I1206 08:48:02.325019 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-526kf" event={"ID":"0ac397e7-b40f-447c-9466-bd67b9778b1c","Type":"ContainerStarted","Data":"1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1"} Dec 06 08:48:03 crc kubenswrapper[4763]: I1206 08:48:03.338182 4763 generic.go:334] "Generic (PLEG): container finished" 
podID="0ac397e7-b40f-447c-9466-bd67b9778b1c" containerID="1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1" exitCode=0 Dec 06 08:48:03 crc kubenswrapper[4763]: I1206 08:48:03.338312 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-526kf" event={"ID":"0ac397e7-b40f-447c-9466-bd67b9778b1c","Type":"ContainerDied","Data":"1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1"} Dec 06 08:48:04 crc kubenswrapper[4763]: I1206 08:48:04.551644 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:48:04 crc kubenswrapper[4763]: I1206 08:48:04.551983 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:48:04 crc kubenswrapper[4763]: I1206 08:48:04.600244 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:48:05 crc kubenswrapper[4763]: I1206 08:48:05.359543 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-526kf" event={"ID":"0ac397e7-b40f-447c-9466-bd67b9778b1c","Type":"ContainerStarted","Data":"b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17"} Dec 06 08:48:05 crc kubenswrapper[4763]: I1206 08:48:05.388062 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-526kf" podStartSLOduration=4.053841663 podStartE2EDuration="8.388044112s" podCreationTimestamp="2025-12-06 08:47:57 +0000 UTC" firstStartedPulling="2025-12-06 08:48:00.307566643 +0000 UTC m=+2162.883271681" lastFinishedPulling="2025-12-06 08:48:04.641769092 +0000 UTC m=+2167.217474130" observedRunningTime="2025-12-06 08:48:05.382133302 +0000 UTC m=+2167.957838350" watchObservedRunningTime="2025-12-06 08:48:05.388044112 +0000 UTC m=+2167.963749140" Dec 06 08:48:05 crc kubenswrapper[4763]: I1206 08:48:05.414514 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:48:06 crc kubenswrapper[4763]: I1206 08:48:06.198116 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zl8qm"] Dec 06 08:48:07 crc kubenswrapper[4763]: I1206 08:48:07.374932 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zl8qm" podUID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" containerName="registry-server" containerID="cri-o://1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a" gracePeriod=2 Dec 06 08:48:07 crc kubenswrapper[4763]: I1206 08:48:07.956227 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-526kf" Dec 06 08:48:07 crc kubenswrapper[4763]: I1206 08:48:07.956361 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-526kf" Dec 06 08:48:07 crc kubenswrapper[4763]: I1206 08:48:07.999304 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-526kf" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.362504 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.382462 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqdqz\" (UniqueName: \"kubernetes.io/projected/55a3e925-8f05-482d-9551-b3c8a6c95eaa-kube-api-access-rqdqz\") pod \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.383633 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-catalog-content\") pod \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.383729 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-utilities\") pod \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\" (UID: \"55a3e925-8f05-482d-9551-b3c8a6c95eaa\") " Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.384990 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-utilities" (OuterVolumeSpecName: "utilities") pod "55a3e925-8f05-482d-9551-b3c8a6c95eaa" (UID: "55a3e925-8f05-482d-9551-b3c8a6c95eaa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.390324 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55a3e925-8f05-482d-9551-b3c8a6c95eaa-kube-api-access-rqdqz" (OuterVolumeSpecName: "kube-api-access-rqdqz") pod "55a3e925-8f05-482d-9551-b3c8a6c95eaa" (UID: "55a3e925-8f05-482d-9551-b3c8a6c95eaa"). InnerVolumeSpecName "kube-api-access-rqdqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.420851 4763 generic.go:334] "Generic (PLEG): container finished" podID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" containerID="1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a" exitCode=0 Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.421414 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zl8qm" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.421569 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zl8qm" event={"ID":"55a3e925-8f05-482d-9551-b3c8a6c95eaa","Type":"ContainerDied","Data":"1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a"} Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.421620 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zl8qm" event={"ID":"55a3e925-8f05-482d-9551-b3c8a6c95eaa","Type":"ContainerDied","Data":"ff0af56d481dc527f1799bc78402c0724a6a3d7f807561204f935f6d510737e3"} Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.421643 4763 scope.go:117] "RemoveContainer" containerID="1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.442477 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "55a3e925-8f05-482d-9551-b3c8a6c95eaa" (UID: "55a3e925-8f05-482d-9551-b3c8a6c95eaa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.459011 4763 scope.go:117] "RemoveContainer" containerID="d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.478945 4763 scope.go:117] "RemoveContainer" containerID="ee14f47b30deefe8749a4af03d5ac601d262efe9caa0c9789a4e41b79b7a0c75" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.486365 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqdqz\" (UniqueName: \"kubernetes.io/projected/55a3e925-8f05-482d-9551-b3c8a6c95eaa-kube-api-access-rqdqz\") on node \"crc\" DevicePath \"\"" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.486401 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.486414 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55a3e925-8f05-482d-9551-b3c8a6c95eaa-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.527443 4763 scope.go:117] "RemoveContainer" containerID="1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a" Dec 06 08:48:08 crc kubenswrapper[4763]: E1206 08:48:08.527992 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a\": container with ID starting with 1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a not found: ID does not exist" containerID="1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.528023 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a"} err="failed to get container status \"1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a\": rpc error: code = NotFound desc = could not find container 
\"1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a\": container with ID starting with 1da8a160e772186a0a8958f64784b76a8489dbcbdf1dfd6e96065e0ab5bc822a not found: ID does not exist" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.528042 4763 scope.go:117] "RemoveContainer" containerID="d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f" Dec 06 08:48:08 crc kubenswrapper[4763]: E1206 08:48:08.528360 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f\": container with ID starting with d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f not found: ID does not exist" containerID="d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.528381 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f"} err="failed to get container status \"d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f\": rpc error: code = NotFound desc = could not find container \"d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f\": container with ID starting with d2e10e9dacf53997f8a9c4180e1752c30be6d9a5c3762d3a8922a535c065ae3f not found: ID does not exist" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.528429 4763 scope.go:117] "RemoveContainer" containerID="ee14f47b30deefe8749a4af03d5ac601d262efe9caa0c9789a4e41b79b7a0c75" Dec 06 08:48:08 crc kubenswrapper[4763]: E1206 08:48:08.528848 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee14f47b30deefe8749a4af03d5ac601d262efe9caa0c9789a4e41b79b7a0c75\": container with ID starting with ee14f47b30deefe8749a4af03d5ac601d262efe9caa0c9789a4e41b79b7a0c75 not found: ID does not exist" containerID="ee14f47b30deefe8749a4af03d5ac601d262efe9caa0c9789a4e41b79b7a0c75" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.528892 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee14f47b30deefe8749a4af03d5ac601d262efe9caa0c9789a4e41b79b7a0c75"} err="failed to get container status \"ee14f47b30deefe8749a4af03d5ac601d262efe9caa0c9789a4e41b79b7a0c75\": rpc error: code = NotFound desc = could not find container \"ee14f47b30deefe8749a4af03d5ac601d262efe9caa0c9789a4e41b79b7a0c75\": container with ID starting with ee14f47b30deefe8749a4af03d5ac601d262efe9caa0c9789a4e41b79b7a0c75 not found: ID does not exist" Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.758543 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zl8qm"] Dec 06 08:48:08 crc kubenswrapper[4763]: I1206 08:48:08.767706 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zl8qm"] Dec 06 08:48:09 crc kubenswrapper[4763]: I1206 08:48:09.474295 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-526kf" Dec 06 08:48:09 crc kubenswrapper[4763]: I1206 08:48:09.731510 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" path="/var/lib/kubelet/pods/55a3e925-8f05-482d-9551-b3c8a6c95eaa/volumes" Dec 06 08:48:10 crc kubenswrapper[4763]: I1206 08:48:10.400397 4763 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-marketplace/community-operators-526kf"] Dec 06 08:48:11 crc kubenswrapper[4763]: I1206 08:48:11.449504 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-526kf" podUID="0ac397e7-b40f-447c-9466-bd67b9778b1c" containerName="registry-server" containerID="cri-o://b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17" gracePeriod=2 Dec 06 08:48:11 crc kubenswrapper[4763]: I1206 08:48:11.961666 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-526kf" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.060231 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-utilities\") pod \"0ac397e7-b40f-447c-9466-bd67b9778b1c\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.060400 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-catalog-content\") pod \"0ac397e7-b40f-447c-9466-bd67b9778b1c\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.060569 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xtnm\" (UniqueName: \"kubernetes.io/projected/0ac397e7-b40f-447c-9466-bd67b9778b1c-kube-api-access-7xtnm\") pod \"0ac397e7-b40f-447c-9466-bd67b9778b1c\" (UID: \"0ac397e7-b40f-447c-9466-bd67b9778b1c\") " Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.061986 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-utilities" (OuterVolumeSpecName: "utilities") pod "0ac397e7-b40f-447c-9466-bd67b9778b1c" (UID: "0ac397e7-b40f-447c-9466-bd67b9778b1c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.067379 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ac397e7-b40f-447c-9466-bd67b9778b1c-kube-api-access-7xtnm" (OuterVolumeSpecName: "kube-api-access-7xtnm") pod "0ac397e7-b40f-447c-9466-bd67b9778b1c" (UID: "0ac397e7-b40f-447c-9466-bd67b9778b1c"). InnerVolumeSpecName "kube-api-access-7xtnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.111344 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ac397e7-b40f-447c-9466-bd67b9778b1c" (UID: "0ac397e7-b40f-447c-9466-bd67b9778b1c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.162588 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xtnm\" (UniqueName: \"kubernetes.io/projected/0ac397e7-b40f-447c-9466-bd67b9778b1c-kube-api-access-7xtnm\") on node \"crc\" DevicePath \"\"" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.162621 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.162630 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ac397e7-b40f-447c-9466-bd67b9778b1c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.460936 4763 generic.go:334] "Generic (PLEG): container finished" podID="0ac397e7-b40f-447c-9466-bd67b9778b1c" containerID="b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17" exitCode=0 Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.460969 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-526kf" event={"ID":"0ac397e7-b40f-447c-9466-bd67b9778b1c","Type":"ContainerDied","Data":"b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17"} Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.461021 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-526kf" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.461036 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-526kf" event={"ID":"0ac397e7-b40f-447c-9466-bd67b9778b1c","Type":"ContainerDied","Data":"90f5aaab59673ee649556f2133dba0bf093c3e2b8e806dd98852bfd3fecf9dec"} Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.461058 4763 scope.go:117] "RemoveContainer" containerID="b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.480550 4763 scope.go:117] "RemoveContainer" containerID="1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.502410 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-526kf"] Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.511635 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-526kf"] Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.522020 4763 scope.go:117] "RemoveContainer" containerID="073a92346fd0074eb24ab019acb491d94f9ba3462a96436fcd427c5b65818643" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.567761 4763 scope.go:117] "RemoveContainer" containerID="b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17" Dec 06 08:48:12 crc kubenswrapper[4763]: E1206 08:48:12.568220 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17\": container with ID starting with b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17 not found: ID does not exist" containerID="b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.568264 
4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17"} err="failed to get container status \"b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17\": rpc error: code = NotFound desc = could not find container \"b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17\": container with ID starting with b02afca6b2ff1910b4acdc6e4c05fbf56cea883155b31b45ae18a9b8d2948f17 not found: ID does not exist" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.568284 4763 scope.go:117] "RemoveContainer" containerID="1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1" Dec 06 08:48:12 crc kubenswrapper[4763]: E1206 08:48:12.568619 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1\": container with ID starting with 1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1 not found: ID does not exist" containerID="1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.568641 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1"} err="failed to get container status \"1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1\": rpc error: code = NotFound desc = could not find container \"1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1\": container with ID starting with 1d920ff36a8f30415f8e602fa6915a73c539207155b2a61d617a51f436b583c1 not found: ID does not exist" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.568654 4763 scope.go:117] "RemoveContainer" containerID="073a92346fd0074eb24ab019acb491d94f9ba3462a96436fcd427c5b65818643" Dec 06 08:48:12 crc kubenswrapper[4763]: E1206 08:48:12.570460 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"073a92346fd0074eb24ab019acb491d94f9ba3462a96436fcd427c5b65818643\": container with ID starting with 073a92346fd0074eb24ab019acb491d94f9ba3462a96436fcd427c5b65818643 not found: ID does not exist" containerID="073a92346fd0074eb24ab019acb491d94f9ba3462a96436fcd427c5b65818643" Dec 06 08:48:12 crc kubenswrapper[4763]: I1206 08:48:12.570489 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"073a92346fd0074eb24ab019acb491d94f9ba3462a96436fcd427c5b65818643"} err="failed to get container status \"073a92346fd0074eb24ab019acb491d94f9ba3462a96436fcd427c5b65818643\": rpc error: code = NotFound desc = could not find container \"073a92346fd0074eb24ab019acb491d94f9ba3462a96436fcd427c5b65818643\": container with ID starting with 073a92346fd0074eb24ab019acb491d94f9ba3462a96436fcd427c5b65818643 not found: ID does not exist" Dec 06 08:48:13 crc kubenswrapper[4763]: I1206 08:48:13.738824 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ac397e7-b40f-447c-9466-bd67b9778b1c" path="/var/lib/kubelet/pods/0ac397e7-b40f-447c-9466-bd67b9778b1c/volumes" Dec 06 08:48:42 crc kubenswrapper[4763]: I1206 08:48:42.537253 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:48:42 crc kubenswrapper[4763]: I1206 08:48:42.537709 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:49:12 crc kubenswrapper[4763]: I1206 08:49:12.537045 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:49:12 crc kubenswrapper[4763]: I1206 08:49:12.537990 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:49:42 crc kubenswrapper[4763]: I1206 08:49:42.536766 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:49:42 crc kubenswrapper[4763]: I1206 08:49:42.537654 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:49:42 crc kubenswrapper[4763]: I1206 08:49:42.537737 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:49:42 crc kubenswrapper[4763]: I1206 08:49:42.539042 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 08:49:42 crc kubenswrapper[4763]: I1206 08:49:42.539107 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" gracePeriod=600 Dec 06 08:49:42 crc kubenswrapper[4763]: E1206 08:49:42.663346 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:49:43 crc kubenswrapper[4763]: I1206 08:49:43.277259 4763 
generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" exitCode=0 Dec 06 08:49:43 crc kubenswrapper[4763]: I1206 08:49:43.277304 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6"} Dec 06 08:49:43 crc kubenswrapper[4763]: I1206 08:49:43.277337 4763 scope.go:117] "RemoveContainer" containerID="ae1be8b2d0ea850b8e3985a086ff6ca03663fa450062cd2440c8c4dd4d72b10e" Dec 06 08:49:43 crc kubenswrapper[4763]: I1206 08:49:43.278104 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:49:43 crc kubenswrapper[4763]: E1206 08:49:43.278397 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:49:54 crc kubenswrapper[4763]: I1206 08:49:54.720073 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:49:54 crc kubenswrapper[4763]: E1206 08:49:54.721147 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:50:05 crc kubenswrapper[4763]: I1206 08:50:05.720662 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:50:05 crc kubenswrapper[4763]: E1206 08:50:05.721545 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.266981 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6f97h"] Dec 06 08:50:14 crc kubenswrapper[4763]: E1206 08:50:14.268466 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ac397e7-b40f-447c-9466-bd67b9778b1c" containerName="registry-server" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.268483 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ac397e7-b40f-447c-9466-bd67b9778b1c" containerName="registry-server" Dec 06 08:50:14 crc kubenswrapper[4763]: E1206 08:50:14.268506 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" containerName="extract-utilities" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.268513 
4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" containerName="extract-utilities" Dec 06 08:50:14 crc kubenswrapper[4763]: E1206 08:50:14.268535 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" containerName="extract-content" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.268543 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" containerName="extract-content" Dec 06 08:50:14 crc kubenswrapper[4763]: E1206 08:50:14.268557 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" containerName="registry-server" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.268562 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" containerName="registry-server" Dec 06 08:50:14 crc kubenswrapper[4763]: E1206 08:50:14.268591 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ac397e7-b40f-447c-9466-bd67b9778b1c" containerName="extract-content" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.268597 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ac397e7-b40f-447c-9466-bd67b9778b1c" containerName="extract-content" Dec 06 08:50:14 crc kubenswrapper[4763]: E1206 08:50:14.268616 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ac397e7-b40f-447c-9466-bd67b9778b1c" containerName="extract-utilities" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.268622 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ac397e7-b40f-447c-9466-bd67b9778b1c" containerName="extract-utilities" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.269037 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ac397e7-b40f-447c-9466-bd67b9778b1c" containerName="registry-server" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.269067 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="55a3e925-8f05-482d-9551-b3c8a6c95eaa" containerName="registry-server" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.273123 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.285432 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6f97h"] Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.373049 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-utilities\") pod \"redhat-marketplace-6f97h\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.373156 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-catalog-content\") pod \"redhat-marketplace-6f97h\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.373246 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m4zt\" (UniqueName: \"kubernetes.io/projected/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-kube-api-access-5m4zt\") pod \"redhat-marketplace-6f97h\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.475994 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-catalog-content\") pod \"redhat-marketplace-6f97h\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.476097 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m4zt\" (UniqueName: \"kubernetes.io/projected/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-kube-api-access-5m4zt\") pod \"redhat-marketplace-6f97h\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.476274 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-utilities\") pod \"redhat-marketplace-6f97h\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.476663 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-catalog-content\") pod \"redhat-marketplace-6f97h\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.476873 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-utilities\") pod \"redhat-marketplace-6f97h\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.496058 4763 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-5m4zt\" (UniqueName: \"kubernetes.io/projected/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-kube-api-access-5m4zt\") pod \"redhat-marketplace-6f97h\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:14 crc kubenswrapper[4763]: I1206 08:50:14.651679 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:15 crc kubenswrapper[4763]: I1206 08:50:15.133416 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6f97h"] Dec 06 08:50:15 crc kubenswrapper[4763]: I1206 08:50:15.570951 4763 generic.go:334] "Generic (PLEG): container finished" podID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" containerID="28e1d161666d9df3fc91f6fa6df655bae49c405398046e785cb027708f78a23c" exitCode=0 Dec 06 08:50:15 crc kubenswrapper[4763]: I1206 08:50:15.571088 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6f97h" event={"ID":"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6","Type":"ContainerDied","Data":"28e1d161666d9df3fc91f6fa6df655bae49c405398046e785cb027708f78a23c"} Dec 06 08:50:15 crc kubenswrapper[4763]: I1206 08:50:15.571129 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6f97h" event={"ID":"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6","Type":"ContainerStarted","Data":"b38ec1e9f6aa580e2259984d44bb4266af657ee0f421d59b5bc157d6b4516cbc"} Dec 06 08:50:16 crc kubenswrapper[4763]: I1206 08:50:16.581319 4763 generic.go:334] "Generic (PLEG): container finished" podID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" containerID="d1aa4a8117ac7eb14b28b181d6c699c83d4a38b06afb0a429ee9c4a7bd911137" exitCode=0 Dec 06 08:50:16 crc kubenswrapper[4763]: I1206 08:50:16.581394 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6f97h" event={"ID":"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6","Type":"ContainerDied","Data":"d1aa4a8117ac7eb14b28b181d6c699c83d4a38b06afb0a429ee9c4a7bd911137"} Dec 06 08:50:16 crc kubenswrapper[4763]: I1206 08:50:16.719661 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:50:16 crc kubenswrapper[4763]: E1206 08:50:16.720044 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:50:17 crc kubenswrapper[4763]: I1206 08:50:17.592192 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6f97h" event={"ID":"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6","Type":"ContainerStarted","Data":"026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49"} Dec 06 08:50:17 crc kubenswrapper[4763]: I1206 08:50:17.621413 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6f97h" podStartSLOduration=2.216138485 podStartE2EDuration="3.621391203s" podCreationTimestamp="2025-12-06 08:50:14 +0000 UTC" firstStartedPulling="2025-12-06 08:50:15.574180597 +0000 UTC m=+2298.149885635" lastFinishedPulling="2025-12-06 
08:50:16.979433285 +0000 UTC m=+2299.555138353" observedRunningTime="2025-12-06 08:50:17.610272042 +0000 UTC m=+2300.185977090" watchObservedRunningTime="2025-12-06 08:50:17.621391203 +0000 UTC m=+2300.197096251" Dec 06 08:50:24 crc kubenswrapper[4763]: I1206 08:50:24.652700 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:24 crc kubenswrapper[4763]: I1206 08:50:24.653332 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:24 crc kubenswrapper[4763]: I1206 08:50:24.699978 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:24 crc kubenswrapper[4763]: I1206 08:50:24.749794 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.030030 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6f97h"] Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.030360 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6f97h" podUID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" containerName="registry-server" containerID="cri-o://026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49" gracePeriod=2 Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.487733 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.553684 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-utilities\") pod \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.553812 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5m4zt\" (UniqueName: \"kubernetes.io/projected/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-kube-api-access-5m4zt\") pod \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.554118 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-catalog-content\") pod \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\" (UID: \"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6\") " Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.554578 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-utilities" (OuterVolumeSpecName: "utilities") pod "498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" (UID: "498d599e-a7c0-435b-a5dd-c2ab72fcb0c6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.561192 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-kube-api-access-5m4zt" (OuterVolumeSpecName: "kube-api-access-5m4zt") pod "498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" (UID: "498d599e-a7c0-435b-a5dd-c2ab72fcb0c6"). InnerVolumeSpecName "kube-api-access-5m4zt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.573295 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" (UID: "498d599e-a7c0-435b-a5dd-c2ab72fcb0c6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.657151 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.657217 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5m4zt\" (UniqueName: \"kubernetes.io/projected/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-kube-api-access-5m4zt\") on node \"crc\" DevicePath \"\"" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.657237 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.719167 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:50:28 crc kubenswrapper[4763]: E1206 08:50:28.719808 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.724420 4763 generic.go:334] "Generic (PLEG): container finished" podID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" containerID="026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49" exitCode=0 Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.724454 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6f97h" event={"ID":"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6","Type":"ContainerDied","Data":"026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49"} Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.724470 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6f97h" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.724477 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6f97h" event={"ID":"498d599e-a7c0-435b-a5dd-c2ab72fcb0c6","Type":"ContainerDied","Data":"b38ec1e9f6aa580e2259984d44bb4266af657ee0f421d59b5bc157d6b4516cbc"} Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.724493 4763 scope.go:117] "RemoveContainer" containerID="026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.746990 4763 scope.go:117] "RemoveContainer" containerID="d1aa4a8117ac7eb14b28b181d6c699c83d4a38b06afb0a429ee9c4a7bd911137" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.756182 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6f97h"] Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.764156 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6f97h"] Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.773138 4763 scope.go:117] "RemoveContainer" containerID="28e1d161666d9df3fc91f6fa6df655bae49c405398046e785cb027708f78a23c" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.819750 4763 scope.go:117] "RemoveContainer" containerID="026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49" Dec 06 08:50:28 crc kubenswrapper[4763]: E1206 08:50:28.820260 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49\": container with ID starting with 026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49 not found: ID does not exist" containerID="026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.820305 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49"} err="failed to get container status \"026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49\": rpc error: code = NotFound desc = could not find container \"026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49\": container with ID starting with 026f23de582a0d4ec943e0a1eca8cdb6847fee4333b36a6843555f38b585aa49 not found: ID does not exist" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.820330 4763 scope.go:117] "RemoveContainer" containerID="d1aa4a8117ac7eb14b28b181d6c699c83d4a38b06afb0a429ee9c4a7bd911137" Dec 06 08:50:28 crc kubenswrapper[4763]: E1206 08:50:28.820779 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1aa4a8117ac7eb14b28b181d6c699c83d4a38b06afb0a429ee9c4a7bd911137\": container with ID starting with d1aa4a8117ac7eb14b28b181d6c699c83d4a38b06afb0a429ee9c4a7bd911137 not found: ID does not exist" containerID="d1aa4a8117ac7eb14b28b181d6c699c83d4a38b06afb0a429ee9c4a7bd911137" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.820818 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1aa4a8117ac7eb14b28b181d6c699c83d4a38b06afb0a429ee9c4a7bd911137"} err="failed to get container status \"d1aa4a8117ac7eb14b28b181d6c699c83d4a38b06afb0a429ee9c4a7bd911137\": rpc error: code = NotFound desc = could not find 
container \"d1aa4a8117ac7eb14b28b181d6c699c83d4a38b06afb0a429ee9c4a7bd911137\": container with ID starting with d1aa4a8117ac7eb14b28b181d6c699c83d4a38b06afb0a429ee9c4a7bd911137 not found: ID does not exist" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.820840 4763 scope.go:117] "RemoveContainer" containerID="28e1d161666d9df3fc91f6fa6df655bae49c405398046e785cb027708f78a23c" Dec 06 08:50:28 crc kubenswrapper[4763]: E1206 08:50:28.821145 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28e1d161666d9df3fc91f6fa6df655bae49c405398046e785cb027708f78a23c\": container with ID starting with 28e1d161666d9df3fc91f6fa6df655bae49c405398046e785cb027708f78a23c not found: ID does not exist" containerID="28e1d161666d9df3fc91f6fa6df655bae49c405398046e785cb027708f78a23c" Dec 06 08:50:28 crc kubenswrapper[4763]: I1206 08:50:28.821173 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28e1d161666d9df3fc91f6fa6df655bae49c405398046e785cb027708f78a23c"} err="failed to get container status \"28e1d161666d9df3fc91f6fa6df655bae49c405398046e785cb027708f78a23c\": rpc error: code = NotFound desc = could not find container \"28e1d161666d9df3fc91f6fa6df655bae49c405398046e785cb027708f78a23c\": container with ID starting with 28e1d161666d9df3fc91f6fa6df655bae49c405398046e785cb027708f78a23c not found: ID does not exist" Dec 06 08:50:29 crc kubenswrapper[4763]: I1206 08:50:29.764040 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" path="/var/lib/kubelet/pods/498d599e-a7c0-435b-a5dd-c2ab72fcb0c6/volumes" Dec 06 08:50:41 crc kubenswrapper[4763]: I1206 08:50:41.719336 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:50:41 crc kubenswrapper[4763]: E1206 08:50:41.720284 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:50:53 crc kubenswrapper[4763]: I1206 08:50:53.720728 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:50:53 crc kubenswrapper[4763]: E1206 08:50:53.721561 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:51:07 crc kubenswrapper[4763]: I1206 08:51:07.726654 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:51:07 crc kubenswrapper[4763]: E1206 08:51:07.727540 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:51:21 crc kubenswrapper[4763]: I1206 08:51:21.720290 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:51:21 crc kubenswrapper[4763]: E1206 08:51:21.720873 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:51:32 crc kubenswrapper[4763]: I1206 08:51:32.719962 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:51:32 crc kubenswrapper[4763]: E1206 08:51:32.720757 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:51:47 crc kubenswrapper[4763]: I1206 08:51:47.725938 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:51:47 crc kubenswrapper[4763]: E1206 08:51:47.726529 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:51:59 crc kubenswrapper[4763]: I1206 08:51:59.719357 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:51:59 crc kubenswrapper[4763]: E1206 08:51:59.720100 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:52:10 crc kubenswrapper[4763]: I1206 08:52:10.694885 4763 generic.go:334] "Generic (PLEG): container finished" podID="48ac6869-b493-4288-9837-9acc1cdc9a90" containerID="3c515f0432e9e80341a7cacb40f08e1feca573b1175aa0c45a0a86e87d49ad68" exitCode=0 Dec 06 08:52:10 crc kubenswrapper[4763]: I1206 08:52:10.694975 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" event={"ID":"48ac6869-b493-4288-9837-9acc1cdc9a90","Type":"ContainerDied","Data":"3c515f0432e9e80341a7cacb40f08e1feca573b1175aa0c45a0a86e87d49ad68"} Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 
08:52:12.116780 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.251814 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wkc4\" (UniqueName: \"kubernetes.io/projected/48ac6869-b493-4288-9837-9acc1cdc9a90-kube-api-access-5wkc4\") pod \"48ac6869-b493-4288-9837-9acc1cdc9a90\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.251931 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-ssh-key\") pod \"48ac6869-b493-4288-9837-9acc1cdc9a90\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.251969 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-secret-0\") pod \"48ac6869-b493-4288-9837-9acc1cdc9a90\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.251997 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-inventory\") pod \"48ac6869-b493-4288-9837-9acc1cdc9a90\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.252068 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-combined-ca-bundle\") pod \"48ac6869-b493-4288-9837-9acc1cdc9a90\" (UID: \"48ac6869-b493-4288-9837-9acc1cdc9a90\") " Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.264263 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "48ac6869-b493-4288-9837-9acc1cdc9a90" (UID: "48ac6869-b493-4288-9837-9acc1cdc9a90"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.264348 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48ac6869-b493-4288-9837-9acc1cdc9a90-kube-api-access-5wkc4" (OuterVolumeSpecName: "kube-api-access-5wkc4") pod "48ac6869-b493-4288-9837-9acc1cdc9a90" (UID: "48ac6869-b493-4288-9837-9acc1cdc9a90"). InnerVolumeSpecName "kube-api-access-5wkc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.282397 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-inventory" (OuterVolumeSpecName: "inventory") pod "48ac6869-b493-4288-9837-9acc1cdc9a90" (UID: "48ac6869-b493-4288-9837-9acc1cdc9a90"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.282968 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "48ac6869-b493-4288-9837-9acc1cdc9a90" (UID: "48ac6869-b493-4288-9837-9acc1cdc9a90"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.293058 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "48ac6869-b493-4288-9837-9acc1cdc9a90" (UID: "48ac6869-b493-4288-9837-9acc1cdc9a90"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.357424 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wkc4\" (UniqueName: \"kubernetes.io/projected/48ac6869-b493-4288-9837-9acc1cdc9a90-kube-api-access-5wkc4\") on node \"crc\" DevicePath \"\"" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.357468 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.357480 4763 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.357491 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.357502 4763 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ac6869-b493-4288-9837-9acc1cdc9a90-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.713331 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" event={"ID":"48ac6869-b493-4288-9837-9acc1cdc9a90","Type":"ContainerDied","Data":"5e74432975cde6d819e90f196a085fa3eaac4cd371794e3ca6cb72e21e4d4157"} Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.713380 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e74432975cde6d819e90f196a085fa3eaac4cd371794e3ca6cb72e21e4d4157" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.713379 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nw425" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.810120 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v"] Dec 06 08:52:12 crc kubenswrapper[4763]: E1206 08:52:12.810611 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" containerName="extract-utilities" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.810635 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" containerName="extract-utilities" Dec 06 08:52:12 crc kubenswrapper[4763]: E1206 08:52:12.810661 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" containerName="extract-content" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.810668 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" containerName="extract-content" Dec 06 08:52:12 crc kubenswrapper[4763]: E1206 08:52:12.810703 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48ac6869-b493-4288-9837-9acc1cdc9a90" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.810711 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="48ac6869-b493-4288-9837-9acc1cdc9a90" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 06 08:52:12 crc kubenswrapper[4763]: E1206 08:52:12.810722 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" containerName="registry-server" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.810728 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" containerName="registry-server" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.810978 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="48ac6869-b493-4288-9837-9acc1cdc9a90" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.811003 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="498d599e-a7c0-435b-a5dd-c2ab72fcb0c6" containerName="registry-server" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.811814 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.813586 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.814550 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.818110 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.818133 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.818660 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.818889 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.824590 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.826605 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v"] Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.868561 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-449n6\" (UniqueName: \"kubernetes.io/projected/d118117b-51ec-4b2f-ae42-61af6c35ba88-kube-api-access-449n6\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.868614 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.868642 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.868683 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.868761 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.868803 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.868868 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.868890 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.868932 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.970813 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.970877 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.970959 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.970980 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.971002 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.971029 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-449n6\" (UniqueName: \"kubernetes.io/projected/d118117b-51ec-4b2f-ae42-61af6c35ba88-kube-api-access-449n6\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.971048 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.971071 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.971103 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.972564 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.976166 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.976198 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: 
\"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.976196 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.976341 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.976603 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.976720 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.977060 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:12 crc kubenswrapper[4763]: I1206 08:52:12.987314 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-449n6\" (UniqueName: \"kubernetes.io/projected/d118117b-51ec-4b2f-ae42-61af6c35ba88-kube-api-access-449n6\") pod \"nova-edpm-deployment-openstack-edpm-ipam-nhr4v\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:13 crc kubenswrapper[4763]: I1206 08:52:13.135036 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:52:13 crc kubenswrapper[4763]: I1206 08:52:13.639957 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v"] Dec 06 08:52:13 crc kubenswrapper[4763]: I1206 08:52:13.722996 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:52:13 crc kubenswrapper[4763]: E1206 08:52:13.723282 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:52:13 crc kubenswrapper[4763]: I1206 08:52:13.732780 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" event={"ID":"d118117b-51ec-4b2f-ae42-61af6c35ba88","Type":"ContainerStarted","Data":"ad26bbdd78b90cb973c3b148d9fc1865b75720ee647beb95f8a99eaa1d80f9dc"} Dec 06 08:52:14 crc kubenswrapper[4763]: I1206 08:52:14.740626 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" event={"ID":"d118117b-51ec-4b2f-ae42-61af6c35ba88","Type":"ContainerStarted","Data":"e51b7dbb88570f5fafe815558e75195c9342be9d223a848578d018032dc26720"} Dec 06 08:52:14 crc kubenswrapper[4763]: I1206 08:52:14.761106 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" podStartSLOduration=2.091068139 podStartE2EDuration="2.761082191s" podCreationTimestamp="2025-12-06 08:52:12 +0000 UTC" firstStartedPulling="2025-12-06 08:52:13.650777974 +0000 UTC m=+2416.226483022" lastFinishedPulling="2025-12-06 08:52:14.320792046 +0000 UTC m=+2416.896497074" observedRunningTime="2025-12-06 08:52:14.756456716 +0000 UTC m=+2417.332161764" watchObservedRunningTime="2025-12-06 08:52:14.761082191 +0000 UTC m=+2417.336787229" Dec 06 08:52:28 crc kubenswrapper[4763]: I1206 08:52:28.720483 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:52:28 crc kubenswrapper[4763]: E1206 08:52:28.721298 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:52:39 crc kubenswrapper[4763]: I1206 08:52:39.719716 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:52:39 crc kubenswrapper[4763]: E1206 08:52:39.720784 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" 
podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:52:52 crc kubenswrapper[4763]: I1206 08:52:52.719856 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:52:52 crc kubenswrapper[4763]: E1206 08:52:52.720649 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:53:07 crc kubenswrapper[4763]: I1206 08:53:07.728265 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:53:07 crc kubenswrapper[4763]: E1206 08:53:07.729132 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:53:20 crc kubenswrapper[4763]: I1206 08:53:20.719880 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:53:20 crc kubenswrapper[4763]: E1206 08:53:20.720716 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:53:31 crc kubenswrapper[4763]: I1206 08:53:31.744449 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:53:31 crc kubenswrapper[4763]: E1206 08:53:31.745384 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:53:46 crc kubenswrapper[4763]: I1206 08:53:46.719587 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:53:46 crc kubenswrapper[4763]: E1206 08:53:46.720336 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:53:58 crc kubenswrapper[4763]: I1206 08:53:58.719983 4763 scope.go:117] "RemoveContainer" 
containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:53:58 crc kubenswrapper[4763]: E1206 08:53:58.720766 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:54:12 crc kubenswrapper[4763]: I1206 08:54:12.720547 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:54:12 crc kubenswrapper[4763]: E1206 08:54:12.721752 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.672357 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bztvd"] Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.674940 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.697433 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bztvd"] Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.720406 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:54:25 crc kubenswrapper[4763]: E1206 08:54:25.720852 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.778982 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-catalog-content\") pod \"redhat-operators-bztvd\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.779098 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx5bl\" (UniqueName: \"kubernetes.io/projected/66c0baf4-0049-4023-8ff8-4783bcdd8f06-kube-api-access-fx5bl\") pod \"redhat-operators-bztvd\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.779240 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-utilities\") pod 
\"redhat-operators-bztvd\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.882157 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-catalog-content\") pod \"redhat-operators-bztvd\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.882614 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx5bl\" (UniqueName: \"kubernetes.io/projected/66c0baf4-0049-4023-8ff8-4783bcdd8f06-kube-api-access-fx5bl\") pod \"redhat-operators-bztvd\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.882665 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-catalog-content\") pod \"redhat-operators-bztvd\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.884077 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-utilities\") pod \"redhat-operators-bztvd\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.884610 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-utilities\") pod \"redhat-operators-bztvd\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:25 crc kubenswrapper[4763]: I1206 08:54:25.908820 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx5bl\" (UniqueName: \"kubernetes.io/projected/66c0baf4-0049-4023-8ff8-4783bcdd8f06-kube-api-access-fx5bl\") pod \"redhat-operators-bztvd\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:26 crc kubenswrapper[4763]: I1206 08:54:26.016234 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:26 crc kubenswrapper[4763]: I1206 08:54:26.546323 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bztvd"] Dec 06 08:54:26 crc kubenswrapper[4763]: I1206 08:54:26.923158 4763 generic.go:334] "Generic (PLEG): container finished" podID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" containerID="03d55af34d4e964302bc1623108a98b035613cd64d17434a4ec5042c7ce18a36" exitCode=0 Dec 06 08:54:26 crc kubenswrapper[4763]: I1206 08:54:26.923207 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bztvd" event={"ID":"66c0baf4-0049-4023-8ff8-4783bcdd8f06","Type":"ContainerDied","Data":"03d55af34d4e964302bc1623108a98b035613cd64d17434a4ec5042c7ce18a36"} Dec 06 08:54:26 crc kubenswrapper[4763]: I1206 08:54:26.923232 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bztvd" event={"ID":"66c0baf4-0049-4023-8ff8-4783bcdd8f06","Type":"ContainerStarted","Data":"f5e242860d5e12253f7240a4d537014b74cfe71a8dcd3cca3eeaedef81d84218"} Dec 06 08:54:26 crc kubenswrapper[4763]: I1206 08:54:26.925596 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 08:54:27 crc kubenswrapper[4763]: I1206 08:54:27.935279 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bztvd" event={"ID":"66c0baf4-0049-4023-8ff8-4783bcdd8f06","Type":"ContainerStarted","Data":"620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6"} Dec 06 08:54:28 crc kubenswrapper[4763]: I1206 08:54:28.945552 4763 generic.go:334] "Generic (PLEG): container finished" podID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" containerID="620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6" exitCode=0 Dec 06 08:54:28 crc kubenswrapper[4763]: I1206 08:54:28.945599 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bztvd" event={"ID":"66c0baf4-0049-4023-8ff8-4783bcdd8f06","Type":"ContainerDied","Data":"620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6"} Dec 06 08:54:29 crc kubenswrapper[4763]: I1206 08:54:29.965598 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bztvd" event={"ID":"66c0baf4-0049-4023-8ff8-4783bcdd8f06","Type":"ContainerStarted","Data":"ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2"} Dec 06 08:54:29 crc kubenswrapper[4763]: I1206 08:54:29.985864 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bztvd" podStartSLOduration=2.497166389 podStartE2EDuration="4.98584422s" podCreationTimestamp="2025-12-06 08:54:25 +0000 UTC" firstStartedPulling="2025-12-06 08:54:26.925269033 +0000 UTC m=+2549.500974071" lastFinishedPulling="2025-12-06 08:54:29.413946864 +0000 UTC m=+2551.989651902" observedRunningTime="2025-12-06 08:54:29.981319988 +0000 UTC m=+2552.557025036" watchObservedRunningTime="2025-12-06 08:54:29.98584422 +0000 UTC m=+2552.561549258" Dec 06 08:54:36 crc kubenswrapper[4763]: I1206 08:54:36.016829 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:36 crc kubenswrapper[4763]: I1206 08:54:36.017412 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:36 crc 
kubenswrapper[4763]: I1206 08:54:36.069261 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:36 crc kubenswrapper[4763]: I1206 08:54:36.720106 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:54:36 crc kubenswrapper[4763]: E1206 08:54:36.720765 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 08:54:37 crc kubenswrapper[4763]: I1206 08:54:37.077467 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:37 crc kubenswrapper[4763]: I1206 08:54:37.125193 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bztvd"] Dec 06 08:54:39 crc kubenswrapper[4763]: I1206 08:54:39.048880 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bztvd" podUID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" containerName="registry-server" containerID="cri-o://ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2" gracePeriod=2 Dec 06 08:54:40 crc kubenswrapper[4763]: I1206 08:54:40.564448 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:40 crc kubenswrapper[4763]: I1206 08:54:40.695180 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-utilities\") pod \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " Dec 06 08:54:40 crc kubenswrapper[4763]: I1206 08:54:40.695334 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-catalog-content\") pod \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " Dec 06 08:54:40 crc kubenswrapper[4763]: I1206 08:54:40.695497 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fx5bl\" (UniqueName: \"kubernetes.io/projected/66c0baf4-0049-4023-8ff8-4783bcdd8f06-kube-api-access-fx5bl\") pod \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\" (UID: \"66c0baf4-0049-4023-8ff8-4783bcdd8f06\") " Dec 06 08:54:40 crc kubenswrapper[4763]: I1206 08:54:40.697224 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-utilities" (OuterVolumeSpecName: "utilities") pod "66c0baf4-0049-4023-8ff8-4783bcdd8f06" (UID: "66c0baf4-0049-4023-8ff8-4783bcdd8f06"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:54:40 crc kubenswrapper[4763]: I1206 08:54:40.700803 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66c0baf4-0049-4023-8ff8-4783bcdd8f06-kube-api-access-fx5bl" (OuterVolumeSpecName: "kube-api-access-fx5bl") pod "66c0baf4-0049-4023-8ff8-4783bcdd8f06" (UID: "66c0baf4-0049-4023-8ff8-4783bcdd8f06"). InnerVolumeSpecName "kube-api-access-fx5bl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:54:40 crc kubenswrapper[4763]: I1206 08:54:40.799025 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fx5bl\" (UniqueName: \"kubernetes.io/projected/66c0baf4-0049-4023-8ff8-4783bcdd8f06-kube-api-access-fx5bl\") on node \"crc\" DevicePath \"\"" Dec 06 08:54:40 crc kubenswrapper[4763]: I1206 08:54:40.799067 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:54:40 crc kubenswrapper[4763]: I1206 08:54:40.803601 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "66c0baf4-0049-4023-8ff8-4783bcdd8f06" (UID: "66c0baf4-0049-4023-8ff8-4783bcdd8f06"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:54:40 crc kubenswrapper[4763]: I1206 08:54:40.900697 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66c0baf4-0049-4023-8ff8-4783bcdd8f06-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.077013 4763 generic.go:334] "Generic (PLEG): container finished" podID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" containerID="ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2" exitCode=0 Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.077242 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bztvd" event={"ID":"66c0baf4-0049-4023-8ff8-4783bcdd8f06","Type":"ContainerDied","Data":"ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2"} Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.077265 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bztvd" event={"ID":"66c0baf4-0049-4023-8ff8-4783bcdd8f06","Type":"ContainerDied","Data":"f5e242860d5e12253f7240a4d537014b74cfe71a8dcd3cca3eeaedef81d84218"} Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.077282 4763 scope.go:117] "RemoveContainer" containerID="ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2" Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.077384 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bztvd" Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.110322 4763 scope.go:117] "RemoveContainer" containerID="620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6" Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.129053 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bztvd"] Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.139482 4763 scope.go:117] "RemoveContainer" containerID="03d55af34d4e964302bc1623108a98b035613cd64d17434a4ec5042c7ce18a36" Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.140757 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bztvd"] Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.192141 4763 scope.go:117] "RemoveContainer" containerID="ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2" Dec 06 08:54:41 crc kubenswrapper[4763]: E1206 08:54:41.192767 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2\": container with ID starting with ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2 not found: ID does not exist" containerID="ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2" Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.192809 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2"} err="failed to get container status \"ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2\": rpc error: code = NotFound desc = could not find container \"ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2\": container with ID starting with ff85da030a6c0fbc9ae4ec4b19bd26f8b3747676cc1d92b4cbeb327f6a300fc2 not found: ID does not exist" Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.192829 4763 scope.go:117] "RemoveContainer" containerID="620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6" Dec 06 08:54:41 crc kubenswrapper[4763]: E1206 08:54:41.193273 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6\": container with ID starting with 620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6 not found: ID does not exist" containerID="620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6" Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.193329 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6"} err="failed to get container status \"620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6\": rpc error: code = NotFound desc = could not find container \"620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6\": container with ID starting with 620d6c50478185063c4d21891a589fbe91bcdf1049c1c0ee83c9514d51466ad6 not found: ID does not exist" Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.193362 4763 scope.go:117] "RemoveContainer" containerID="03d55af34d4e964302bc1623108a98b035613cd64d17434a4ec5042c7ce18a36" Dec 06 08:54:41 crc kubenswrapper[4763]: E1206 08:54:41.193772 4763 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"03d55af34d4e964302bc1623108a98b035613cd64d17434a4ec5042c7ce18a36\": container with ID starting with 03d55af34d4e964302bc1623108a98b035613cd64d17434a4ec5042c7ce18a36 not found: ID does not exist" containerID="03d55af34d4e964302bc1623108a98b035613cd64d17434a4ec5042c7ce18a36" Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.193798 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03d55af34d4e964302bc1623108a98b035613cd64d17434a4ec5042c7ce18a36"} err="failed to get container status \"03d55af34d4e964302bc1623108a98b035613cd64d17434a4ec5042c7ce18a36\": rpc error: code = NotFound desc = could not find container \"03d55af34d4e964302bc1623108a98b035613cd64d17434a4ec5042c7ce18a36\": container with ID starting with 03d55af34d4e964302bc1623108a98b035613cd64d17434a4ec5042c7ce18a36 not found: ID does not exist" Dec 06 08:54:41 crc kubenswrapper[4763]: I1206 08:54:41.731781 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" path="/var/lib/kubelet/pods/66c0baf4-0049-4023-8ff8-4783bcdd8f06/volumes" Dec 06 08:54:50 crc kubenswrapper[4763]: I1206 08:54:50.718918 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:54:51 crc kubenswrapper[4763]: I1206 08:54:51.169171 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"f2fa2c2455ec78318208b1caf4309598a65f3e11298ae4bb560de6abc47258a9"} Dec 06 08:54:59 crc kubenswrapper[4763]: I1206 08:54:59.238522 4763 generic.go:334] "Generic (PLEG): container finished" podID="d118117b-51ec-4b2f-ae42-61af6c35ba88" containerID="e51b7dbb88570f5fafe815558e75195c9342be9d223a848578d018032dc26720" exitCode=0 Dec 06 08:54:59 crc kubenswrapper[4763]: I1206 08:54:59.238611 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" event={"ID":"d118117b-51ec-4b2f-ae42-61af6c35ba88","Type":"ContainerDied","Data":"e51b7dbb88570f5fafe815558e75195c9342be9d223a848578d018032dc26720"} Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.689239 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.795483 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-0\") pod \"d118117b-51ec-4b2f-ae42-61af6c35ba88\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.795552 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-extra-config-0\") pod \"d118117b-51ec-4b2f-ae42-61af6c35ba88\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.795614 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-1\") pod \"d118117b-51ec-4b2f-ae42-61af6c35ba88\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.795651 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-1\") pod \"d118117b-51ec-4b2f-ae42-61af6c35ba88\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.795682 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-inventory\") pod \"d118117b-51ec-4b2f-ae42-61af6c35ba88\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.795765 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-449n6\" (UniqueName: \"kubernetes.io/projected/d118117b-51ec-4b2f-ae42-61af6c35ba88-kube-api-access-449n6\") pod \"d118117b-51ec-4b2f-ae42-61af6c35ba88\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.795805 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-ssh-key\") pod \"d118117b-51ec-4b2f-ae42-61af6c35ba88\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.795934 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-0\") pod \"d118117b-51ec-4b2f-ae42-61af6c35ba88\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.795981 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-combined-ca-bundle\") pod \"d118117b-51ec-4b2f-ae42-61af6c35ba88\" (UID: \"d118117b-51ec-4b2f-ae42-61af6c35ba88\") " Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.801306 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/d118117b-51ec-4b2f-ae42-61af6c35ba88-kube-api-access-449n6" (OuterVolumeSpecName: "kube-api-access-449n6") pod "d118117b-51ec-4b2f-ae42-61af6c35ba88" (UID: "d118117b-51ec-4b2f-ae42-61af6c35ba88"). InnerVolumeSpecName "kube-api-access-449n6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.807684 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "d118117b-51ec-4b2f-ae42-61af6c35ba88" (UID: "d118117b-51ec-4b2f-ae42-61af6c35ba88"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.826293 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "d118117b-51ec-4b2f-ae42-61af6c35ba88" (UID: "d118117b-51ec-4b2f-ae42-61af6c35ba88"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.829703 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "d118117b-51ec-4b2f-ae42-61af6c35ba88" (UID: "d118117b-51ec-4b2f-ae42-61af6c35ba88"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.830926 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-inventory" (OuterVolumeSpecName: "inventory") pod "d118117b-51ec-4b2f-ae42-61af6c35ba88" (UID: "d118117b-51ec-4b2f-ae42-61af6c35ba88"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.831333 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "d118117b-51ec-4b2f-ae42-61af6c35ba88" (UID: "d118117b-51ec-4b2f-ae42-61af6c35ba88"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.839942 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "d118117b-51ec-4b2f-ae42-61af6c35ba88" (UID: "d118117b-51ec-4b2f-ae42-61af6c35ba88"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.841578 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "d118117b-51ec-4b2f-ae42-61af6c35ba88" (UID: "d118117b-51ec-4b2f-ae42-61af6c35ba88"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.851941 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d118117b-51ec-4b2f-ae42-61af6c35ba88" (UID: "d118117b-51ec-4b2f-ae42-61af6c35ba88"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.899704 4763 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.900489 4763 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.900585 4763 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.900648 4763 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.900793 4763 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.900881 4763 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.900972 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.901057 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-449n6\" (UniqueName: \"kubernetes.io/projected/d118117b-51ec-4b2f-ae42-61af6c35ba88-kube-api-access-449n6\") on node \"crc\" DevicePath \"\"" Dec 06 08:55:00 crc kubenswrapper[4763]: I1206 08:55:00.901145 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d118117b-51ec-4b2f-ae42-61af6c35ba88-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.257834 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" event={"ID":"d118117b-51ec-4b2f-ae42-61af6c35ba88","Type":"ContainerDied","Data":"ad26bbdd78b90cb973c3b148d9fc1865b75720ee647beb95f8a99eaa1d80f9dc"} Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.258177 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad26bbdd78b90cb973c3b148d9fc1865b75720ee647beb95f8a99eaa1d80f9dc" Dec 06 08:55:01 crc 
kubenswrapper[4763]: I1206 08:55:01.257913 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-nhr4v" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.411482 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z"] Dec 06 08:55:01 crc kubenswrapper[4763]: E1206 08:55:01.412988 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" containerName="extract-content" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.413008 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" containerName="extract-content" Dec 06 08:55:01 crc kubenswrapper[4763]: E1206 08:55:01.413059 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" containerName="registry-server" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.413066 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" containerName="registry-server" Dec 06 08:55:01 crc kubenswrapper[4763]: E1206 08:55:01.413090 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" containerName="extract-utilities" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.413097 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" containerName="extract-utilities" Dec 06 08:55:01 crc kubenswrapper[4763]: E1206 08:55:01.413111 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d118117b-51ec-4b2f-ae42-61af6c35ba88" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.413118 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d118117b-51ec-4b2f-ae42-61af6c35ba88" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.414120 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="66c0baf4-0049-4023-8ff8-4783bcdd8f06" containerName="registry-server" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.414151 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="d118117b-51ec-4b2f-ae42-61af6c35ba88" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.415886 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.420592 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.423394 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.426559 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.426992 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cfq2p" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.428334 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.451592 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z"] Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.513827 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.513882 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmj7k\" (UniqueName: \"kubernetes.io/projected/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-kube-api-access-xmj7k\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.513942 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.514008 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.514033 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 
crc kubenswrapper[4763]: I1206 08:55:01.514065 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.514089 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.615624 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.615694 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmj7k\" (UniqueName: \"kubernetes.io/projected/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-kube-api-access-xmj7k\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.615746 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.615842 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.615868 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.615943 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: 
\"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.615965 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.619383 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.620612 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.620668 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.621007 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.621521 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.624588 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.638635 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmj7k\" (UniqueName: \"kubernetes.io/projected/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-kube-api-access-xmj7k\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z\" (UID: 
\"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:01 crc kubenswrapper[4763]: I1206 08:55:01.740315 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:55:02 crc kubenswrapper[4763]: W1206 08:55:02.264815 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb51e4fc2_25a8_4d25_bb8e_8cbaf9cf7e59.slice/crio-a5b5e7478f4dbb367f512f4fde1aa56dc0ed1f3e9b9acc8918b78805a073e4ad WatchSource:0}: Error finding container a5b5e7478f4dbb367f512f4fde1aa56dc0ed1f3e9b9acc8918b78805a073e4ad: Status 404 returned error can't find the container with id a5b5e7478f4dbb367f512f4fde1aa56dc0ed1f3e9b9acc8918b78805a073e4ad Dec 06 08:55:02 crc kubenswrapper[4763]: I1206 08:55:02.267780 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z"] Dec 06 08:55:03 crc kubenswrapper[4763]: I1206 08:55:03.279180 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" event={"ID":"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59","Type":"ContainerStarted","Data":"0bdcea55ab28cbc4af11acbe421a6dbe0b92332500fd7b6483a9eab98217af8a"} Dec 06 08:55:03 crc kubenswrapper[4763]: I1206 08:55:03.279516 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" event={"ID":"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59","Type":"ContainerStarted","Data":"a5b5e7478f4dbb367f512f4fde1aa56dc0ed1f3e9b9acc8918b78805a073e4ad"} Dec 06 08:55:03 crc kubenswrapper[4763]: I1206 08:55:03.299201 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" podStartSLOduration=1.902580519 podStartE2EDuration="2.299180442s" podCreationTimestamp="2025-12-06 08:55:01 +0000 UTC" firstStartedPulling="2025-12-06 08:55:02.27039799 +0000 UTC m=+2584.846103028" lastFinishedPulling="2025-12-06 08:55:02.666997913 +0000 UTC m=+2585.242702951" observedRunningTime="2025-12-06 08:55:03.29505263 +0000 UTC m=+2585.870757668" watchObservedRunningTime="2025-12-06 08:55:03.299180442 +0000 UTC m=+2585.874885480" Dec 06 08:57:12 crc kubenswrapper[4763]: I1206 08:57:12.536477 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:57:12 crc kubenswrapper[4763]: I1206 08:57:12.537015 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:57:18 crc kubenswrapper[4763]: I1206 08:57:18.443396 4763 generic.go:334] "Generic (PLEG): container finished" podID="b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59" containerID="0bdcea55ab28cbc4af11acbe421a6dbe0b92332500fd7b6483a9eab98217af8a" exitCode=0 Dec 06 08:57:18 crc kubenswrapper[4763]: I1206 08:57:18.443475 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" event={"ID":"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59","Type":"ContainerDied","Data":"0bdcea55ab28cbc4af11acbe421a6dbe0b92332500fd7b6483a9eab98217af8a"} Dec 06 08:57:19 crc kubenswrapper[4763]: I1206 08:57:19.879749 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.006963 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ssh-key\") pod \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.007582 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmj7k\" (UniqueName: \"kubernetes.io/projected/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-kube-api-access-xmj7k\") pod \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.007739 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-inventory\") pod \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.007847 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-0\") pod \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.007921 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-1\") pod \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.007960 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-telemetry-combined-ca-bundle\") pod \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.007992 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-2\") pod \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\" (UID: \"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59\") " Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.015579 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-kube-api-access-xmj7k" (OuterVolumeSpecName: "kube-api-access-xmj7k") pod "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59" (UID: "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59"). InnerVolumeSpecName "kube-api-access-xmj7k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.035297 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59" (UID: "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.043068 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59" (UID: "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.043528 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59" (UID: "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.044778 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59" (UID: "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.045994 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-inventory" (OuterVolumeSpecName: "inventory") pod "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59" (UID: "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.062103 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59" (UID: "b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.109856 4763 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-inventory\") on node \"crc\" DevicePath \"\"" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.109915 4763 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.109932 4763 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.109945 4763 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.109957 4763 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.109966 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.109974 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmj7k\" (UniqueName: \"kubernetes.io/projected/b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59-kube-api-access-xmj7k\") on node \"crc\" DevicePath \"\"" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.465832 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" event={"ID":"b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59","Type":"ContainerDied","Data":"a5b5e7478f4dbb367f512f4fde1aa56dc0ed1f3e9b9acc8918b78805a073e4ad"} Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.465873 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5b5e7478f4dbb367f512f4fde1aa56dc0ed1f3e9b9acc8918b78805a073e4ad" Dec 06 08:57:20 crc kubenswrapper[4763]: I1206 08:57:20.465892 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z" Dec 06 08:57:42 crc kubenswrapper[4763]: I1206 08:57:42.536801 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:57:42 crc kubenswrapper[4763]: I1206 08:57:42.537742 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.249562 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Dec 06 08:57:59 crc kubenswrapper[4763]: E1206 08:57:59.250428 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.250441 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.250653 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.251739 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.255376 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.264017 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282226 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282285 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-sys\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282413 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-dev\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282439 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282490 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282559 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-lib-modules\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282583 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-scripts\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282634 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-config-data-custom\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282675 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282703 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-etc-nvme\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282748 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282784 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gm5q\" (UniqueName: \"kubernetes.io/projected/34c058b8-cdf5-4041-8667-b39f337a908c-kube-api-access-4gm5q\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282861 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282885 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-config-data\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.282952 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-run\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.365953 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-nfs-0"] Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.367958 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.375280 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-nfs-config-data" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.379809 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-0"] Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387367 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387422 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-lib-modules\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387450 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-scripts\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387484 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-config-data-custom\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387519 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387542 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-etc-nvme\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387563 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387594 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gm5q\" (UniqueName: \"kubernetes.io/projected/34c058b8-cdf5-4041-8667-b39f337a908c-kube-api-access-4gm5q\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387651 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-config-data\") pod \"cinder-backup-0\" (UID: 
\"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387671 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387712 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-run\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387742 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387770 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-sys\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387838 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-dev\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.387863 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.388219 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.388262 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.388288 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-lib-modules\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.389382 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-etc-iscsi\") pod \"cinder-backup-0\" (UID: 
\"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.389415 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-run\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.390679 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-sys\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.390883 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.390982 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.391010 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-dev\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.391065 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/34c058b8-cdf5-4041-8667-b39f337a908c-etc-nvme\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.399150 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.400756 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-config-data-custom\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.401401 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.403858 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-nfs-2-config-data" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.411873 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gm5q\" (UniqueName: \"kubernetes.io/projected/34c058b8-cdf5-4041-8667-b39f337a908c-kube-api-access-4gm5q\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.415623 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.417650 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.427441 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-config-data\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.432436 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34c058b8-cdf5-4041-8667-b39f337a908c-scripts\") pod \"cinder-backup-0\" (UID: \"34c058b8-cdf5-4041-8667-b39f337a908c\") " pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.489770 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490186 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-etc-nvme\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490303 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490352 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490377 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnzp7\" (UniqueName: 
\"kubernetes.io/projected/75dd8383-5523-4f4f-ad1a-d59db9482fa3-kube-api-access-tnzp7\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490412 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-var-locks-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490526 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-sys\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490549 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490573 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490616 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490660 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-run\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490700 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-etc-machine-id\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490747 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490788 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-run\") 
pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490845 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.490880 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491071 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491123 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491189 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491221 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491432 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-lib-modules\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491487 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491531 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-var-locks-brick\") pod \"cinder-volume-nfs-2-0\" (UID: 
\"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491554 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491587 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491610 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491682 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491739 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-dev\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491759 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.491793 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdhsm\" (UniqueName: \"kubernetes.io/projected/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-kube-api-access-fdhsm\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.571156 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.593939 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-run\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.593990 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594011 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594052 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594077 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594080 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-run\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594103 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594120 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594117 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594153 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-lib-modules\") pod 
\"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594173 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594201 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594214 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-var-locks-brick\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594233 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594250 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594280 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594304 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594320 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-dev\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594342 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdhsm\" (UniqueName: \"kubernetes.io/projected/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-kube-api-access-fdhsm\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594362 4763 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594405 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-etc-nvme\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594426 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594445 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnzp7\" (UniqueName: \"kubernetes.io/projected/75dd8383-5523-4f4f-ad1a-d59db9482fa3-kube-api-access-tnzp7\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594459 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594477 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-var-locks-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594502 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-sys\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594517 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594530 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594550 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " 
pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594568 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-run\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594585 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-etc-machine-id\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594603 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594685 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594742 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594773 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-lib-modules\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.594137 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595104 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595148 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595170 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-var-locks-brick\") pod 
\"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595184 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-sys\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595204 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595222 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-etc-nvme\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595505 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595530 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-dev\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595560 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595542 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595622 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595662 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-run\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595709 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-var-locks-cinder\") pod 
\"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.595721 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75dd8383-5523-4f4f-ad1a-d59db9482fa3-etc-machine-id\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.599322 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.599707 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.600308 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.600382 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.600882 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.602547 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.603547 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.605753 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75dd8383-5523-4f4f-ad1a-d59db9482fa3-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.618522 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnzp7\" (UniqueName: 
\"kubernetes.io/projected/75dd8383-5523-4f4f-ad1a-d59db9482fa3-kube-api-access-tnzp7\") pod \"cinder-volume-nfs-0\" (UID: \"75dd8383-5523-4f4f-ad1a-d59db9482fa3\") " pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.622574 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdhsm\" (UniqueName: \"kubernetes.io/projected/edca2cd6-b0e5-4fe8-b53a-a23ab02a568f-kube-api-access-fdhsm\") pod \"cinder-volume-nfs-2-0\" (UID: \"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f\") " pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.690997 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-nfs-0" Dec 06 08:57:59 crc kubenswrapper[4763]: I1206 08:57:59.842976 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:58:00 crc kubenswrapper[4763]: I1206 08:58:00.238626 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 06 08:58:00 crc kubenswrapper[4763]: I1206 08:58:00.433848 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-0"] Dec 06 08:58:00 crc kubenswrapper[4763]: W1206 08:58:00.435122 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75dd8383_5523_4f4f_ad1a_d59db9482fa3.slice/crio-7c53588c59134e0e2434c174732dfbed411b9fd475a231d996bfff4996e62917 WatchSource:0}: Error finding container 7c53588c59134e0e2434c174732dfbed411b9fd475a231d996bfff4996e62917: Status 404 returned error can't find the container with id 7c53588c59134e0e2434c174732dfbed411b9fd475a231d996bfff4996e62917 Dec 06 08:58:00 crc kubenswrapper[4763]: I1206 08:58:00.546012 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Dec 06 08:58:00 crc kubenswrapper[4763]: W1206 08:58:00.629265 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podedca2cd6_b0e5_4fe8_b53a_a23ab02a568f.slice/crio-ecb3bd6fc9894dfba022432fb39e243c8a3726bb702ac3e1499d52f6d7f1723e WatchSource:0}: Error finding container ecb3bd6fc9894dfba022432fb39e243c8a3726bb702ac3e1499d52f6d7f1723e: Status 404 returned error can't find the container with id ecb3bd6fc9894dfba022432fb39e243c8a3726bb702ac3e1499d52f6d7f1723e Dec 06 08:58:00 crc kubenswrapper[4763]: I1206 08:58:00.849956 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"34c058b8-cdf5-4041-8667-b39f337a908c","Type":"ContainerStarted","Data":"38486d03f724d4a682e2f04e1b237bf3e2b4f481db84ee93245678b8ba9f2006"} Dec 06 08:58:00 crc kubenswrapper[4763]: I1206 08:58:00.852107 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f","Type":"ContainerStarted","Data":"ecb3bd6fc9894dfba022432fb39e243c8a3726bb702ac3e1499d52f6d7f1723e"} Dec 06 08:58:00 crc kubenswrapper[4763]: I1206 08:58:00.853702 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"75dd8383-5523-4f4f-ad1a-d59db9482fa3","Type":"ContainerStarted","Data":"7c53588c59134e0e2434c174732dfbed411b9fd475a231d996bfff4996e62917"} Dec 06 08:58:01 crc kubenswrapper[4763]: I1206 08:58:01.864056 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" 
event={"ID":"34c058b8-cdf5-4041-8667-b39f337a908c","Type":"ContainerStarted","Data":"1cef7ed53b83e163e3fee2e497b9daf2c9a8767dc7cc8da9e61c232a8655220d"} Dec 06 08:58:01 crc kubenswrapper[4763]: I1206 08:58:01.864563 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"34c058b8-cdf5-4041-8667-b39f337a908c","Type":"ContainerStarted","Data":"3691c7596e1d60557e581a468427f2910ef5cc79bd0ca22ac48664fa057c512b"} Dec 06 08:58:01 crc kubenswrapper[4763]: I1206 08:58:01.868628 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f","Type":"ContainerStarted","Data":"3f31eb12143ee8093c622500db1b5f61fd33d20bd471316f745657afaeca489b"} Dec 06 08:58:01 crc kubenswrapper[4763]: I1206 08:58:01.868673 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"edca2cd6-b0e5-4fe8-b53a-a23ab02a568f","Type":"ContainerStarted","Data":"303bb3ab19828d4949489fb9853d018e2771b769651cfd43a5581b904cad2cdc"} Dec 06 08:58:01 crc kubenswrapper[4763]: I1206 08:58:01.870455 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"75dd8383-5523-4f4f-ad1a-d59db9482fa3","Type":"ContainerStarted","Data":"2f18c74d0da6829087b99cc31c164b4df82aca8e1e6c81c4214c7d5c4a7fe6cf"} Dec 06 08:58:01 crc kubenswrapper[4763]: I1206 08:58:01.870581 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"75dd8383-5523-4f4f-ad1a-d59db9482fa3","Type":"ContainerStarted","Data":"b53b4887ff6bd789ad88a72c84b0b09c8f677d103256a459c021aa93b411dace"} Dec 06 08:58:01 crc kubenswrapper[4763]: I1206 08:58:01.895125 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.729637575 podStartE2EDuration="2.895106993s" podCreationTimestamp="2025-12-06 08:57:59 +0000 UTC" firstStartedPulling="2025-12-06 08:58:00.243860726 +0000 UTC m=+2762.819565754" lastFinishedPulling="2025-12-06 08:58:00.409330134 +0000 UTC m=+2762.985035172" observedRunningTime="2025-12-06 08:58:01.887884177 +0000 UTC m=+2764.463589205" watchObservedRunningTime="2025-12-06 08:58:01.895106993 +0000 UTC m=+2764.470812031" Dec 06 08:58:01 crc kubenswrapper[4763]: I1206 08:58:01.918656 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-nfs-2-0" podStartSLOduration=2.91863762 podStartE2EDuration="2.91863762s" podCreationTimestamp="2025-12-06 08:57:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:58:01.911302901 +0000 UTC m=+2764.487007959" watchObservedRunningTime="2025-12-06 08:58:01.91863762 +0000 UTC m=+2764.494342658" Dec 06 08:58:01 crc kubenswrapper[4763]: I1206 08:58:01.943047 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-nfs-0" podStartSLOduration=2.721974707 podStartE2EDuration="2.943030109s" podCreationTimestamp="2025-12-06 08:57:59 +0000 UTC" firstStartedPulling="2025-12-06 08:58:00.436626253 +0000 UTC m=+2763.012331291" lastFinishedPulling="2025-12-06 08:58:00.657681645 +0000 UTC m=+2763.233386693" observedRunningTime="2025-12-06 08:58:01.938133357 +0000 UTC m=+2764.513838395" watchObservedRunningTime="2025-12-06 08:58:01.943030109 +0000 UTC m=+2764.518735147" Dec 06 08:58:04 crc kubenswrapper[4763]: I1206 08:58:04.571766 4763 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Dec 06 08:58:04 crc kubenswrapper[4763]: I1206 08:58:04.693067 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-nfs-0" Dec 06 08:58:04 crc kubenswrapper[4763]: I1206 08:58:04.843483 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:58:09 crc kubenswrapper[4763]: I1206 08:58:09.799357 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Dec 06 08:58:09 crc kubenswrapper[4763]: I1206 08:58:09.911560 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-nfs-0" Dec 06 08:58:10 crc kubenswrapper[4763]: I1206 08:58:10.085586 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-nfs-2-0" Dec 06 08:58:12 crc kubenswrapper[4763]: I1206 08:58:12.541520 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 08:58:12 crc kubenswrapper[4763]: I1206 08:58:12.541838 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 08:58:12 crc kubenswrapper[4763]: I1206 08:58:12.541883 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 08:58:12 crc kubenswrapper[4763]: I1206 08:58:12.542674 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f2fa2c2455ec78318208b1caf4309598a65f3e11298ae4bb560de6abc47258a9"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 08:58:12 crc kubenswrapper[4763]: I1206 08:58:12.542719 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://f2fa2c2455ec78318208b1caf4309598a65f3e11298ae4bb560de6abc47258a9" gracePeriod=600 Dec 06 08:58:12 crc kubenswrapper[4763]: I1206 08:58:12.968331 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="f2fa2c2455ec78318208b1caf4309598a65f3e11298ae4bb560de6abc47258a9" exitCode=0 Dec 06 08:58:12 crc kubenswrapper[4763]: I1206 08:58:12.968389 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"f2fa2c2455ec78318208b1caf4309598a65f3e11298ae4bb560de6abc47258a9"} Dec 06 08:58:12 crc kubenswrapper[4763]: I1206 08:58:12.968633 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" 
event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3"} Dec 06 08:58:12 crc kubenswrapper[4763]: I1206 08:58:12.968658 4763 scope.go:117] "RemoveContainer" containerID="663c43a7106f90c0e754c4401dd592813cda0f5191fc073f7f9c8fb8806f0be6" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.159023 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xjfxz"] Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.163348 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.184764 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xjfxz"] Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.270202 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-catalog-content\") pod \"certified-operators-xjfxz\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.270707 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v2d7\" (UniqueName: \"kubernetes.io/projected/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-kube-api-access-7v2d7\") pod \"certified-operators-xjfxz\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.270770 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-utilities\") pod \"certified-operators-xjfxz\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.372962 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-catalog-content\") pod \"certified-operators-xjfxz\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.373083 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v2d7\" (UniqueName: \"kubernetes.io/projected/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-kube-api-access-7v2d7\") pod \"certified-operators-xjfxz\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.373120 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-utilities\") pod \"certified-operators-xjfxz\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.373739 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-utilities\") pod 
\"certified-operators-xjfxz\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.373976 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-catalog-content\") pod \"certified-operators-xjfxz\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.394416 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v2d7\" (UniqueName: \"kubernetes.io/projected/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-kube-api-access-7v2d7\") pod \"certified-operators-xjfxz\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.483870 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:43 crc kubenswrapper[4763]: I1206 08:58:43.858718 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xjfxz"] Dec 06 08:58:43 crc kubenswrapper[4763]: W1206 08:58:43.874107 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b3beb68_ef6e_425d_b1ff_9b74349a0ac6.slice/crio-8df5b6e72a09b05c8a348a23147f3a7424591f4a3769b7a39dfaa985f294d994 WatchSource:0}: Error finding container 8df5b6e72a09b05c8a348a23147f3a7424591f4a3769b7a39dfaa985f294d994: Status 404 returned error can't find the container with id 8df5b6e72a09b05c8a348a23147f3a7424591f4a3769b7a39dfaa985f294d994 Dec 06 08:58:44 crc kubenswrapper[4763]: I1206 08:58:44.267332 4763 generic.go:334] "Generic (PLEG): container finished" podID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" containerID="6219cad99461c62464287a4d315b5d6919f3b6ecfde76c1bfc8bd3a554e48177" exitCode=0 Dec 06 08:58:44 crc kubenswrapper[4763]: I1206 08:58:44.267408 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjfxz" event={"ID":"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6","Type":"ContainerDied","Data":"6219cad99461c62464287a4d315b5d6919f3b6ecfde76c1bfc8bd3a554e48177"} Dec 06 08:58:44 crc kubenswrapper[4763]: I1206 08:58:44.267540 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjfxz" event={"ID":"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6","Type":"ContainerStarted","Data":"8df5b6e72a09b05c8a348a23147f3a7424591f4a3769b7a39dfaa985f294d994"} Dec 06 08:58:45 crc kubenswrapper[4763]: I1206 08:58:45.279416 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjfxz" event={"ID":"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6","Type":"ContainerStarted","Data":"9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53"} Dec 06 08:58:46 crc kubenswrapper[4763]: I1206 08:58:46.292301 4763 generic.go:334] "Generic (PLEG): container finished" podID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" containerID="9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53" exitCode=0 Dec 06 08:58:46 crc kubenswrapper[4763]: I1206 08:58:46.292363 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjfxz" 
event={"ID":"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6","Type":"ContainerDied","Data":"9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53"} Dec 06 08:58:47 crc kubenswrapper[4763]: I1206 08:58:47.305642 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjfxz" event={"ID":"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6","Type":"ContainerStarted","Data":"7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f"} Dec 06 08:58:47 crc kubenswrapper[4763]: I1206 08:58:47.330390 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xjfxz" podStartSLOduration=1.901576746 podStartE2EDuration="4.330368605s" podCreationTimestamp="2025-12-06 08:58:43 +0000 UTC" firstStartedPulling="2025-12-06 08:58:44.268642307 +0000 UTC m=+2806.844347345" lastFinishedPulling="2025-12-06 08:58:46.697434156 +0000 UTC m=+2809.273139204" observedRunningTime="2025-12-06 08:58:47.321319889 +0000 UTC m=+2809.897024937" watchObservedRunningTime="2025-12-06 08:58:47.330368605 +0000 UTC m=+2809.906073663" Dec 06 08:58:53 crc kubenswrapper[4763]: I1206 08:58:53.484680 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:53 crc kubenswrapper[4763]: I1206 08:58:53.485335 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:53 crc kubenswrapper[4763]: I1206 08:58:53.531115 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:54 crc kubenswrapper[4763]: I1206 08:58:54.419397 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:54 crc kubenswrapper[4763]: I1206 08:58:54.466808 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xjfxz"] Dec 06 08:58:56 crc kubenswrapper[4763]: I1206 08:58:56.384277 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xjfxz" podUID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" containerName="registry-server" containerID="cri-o://7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f" gracePeriod=2 Dec 06 08:58:56 crc kubenswrapper[4763]: I1206 08:58:56.917225 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.069784 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-catalog-content\") pod \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.070073 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7v2d7\" (UniqueName: \"kubernetes.io/projected/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-kube-api-access-7v2d7\") pod \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.070095 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-utilities\") pod \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\" (UID: \"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6\") " Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.071003 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-utilities" (OuterVolumeSpecName: "utilities") pod "1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" (UID: "1b3beb68-ef6e-425d-b1ff-9b74349a0ac6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.071712 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.077500 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-kube-api-access-7v2d7" (OuterVolumeSpecName: "kube-api-access-7v2d7") pod "1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" (UID: "1b3beb68-ef6e-425d-b1ff-9b74349a0ac6"). InnerVolumeSpecName "kube-api-access-7v2d7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.126581 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" (UID: "1b3beb68-ef6e-425d-b1ff-9b74349a0ac6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.174092 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7v2d7\" (UniqueName: \"kubernetes.io/projected/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-kube-api-access-7v2d7\") on node \"crc\" DevicePath \"\"" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.174130 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.394433 4763 generic.go:334] "Generic (PLEG): container finished" podID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" containerID="7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f" exitCode=0 Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.394513 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xjfxz" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.394486 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjfxz" event={"ID":"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6","Type":"ContainerDied","Data":"7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f"} Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.394657 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjfxz" event={"ID":"1b3beb68-ef6e-425d-b1ff-9b74349a0ac6","Type":"ContainerDied","Data":"8df5b6e72a09b05c8a348a23147f3a7424591f4a3769b7a39dfaa985f294d994"} Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.394681 4763 scope.go:117] "RemoveContainer" containerID="7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.422088 4763 scope.go:117] "RemoveContainer" containerID="9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.435273 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xjfxz"] Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.445950 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xjfxz"] Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.460369 4763 scope.go:117] "RemoveContainer" containerID="6219cad99461c62464287a4d315b5d6919f3b6ecfde76c1bfc8bd3a554e48177" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.490406 4763 scope.go:117] "RemoveContainer" containerID="7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f" Dec 06 08:58:57 crc kubenswrapper[4763]: E1206 08:58:57.490864 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f\": container with ID starting with 7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f not found: ID does not exist" containerID="7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.490987 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f"} err="failed to get container status 
\"7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f\": rpc error: code = NotFound desc = could not find container \"7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f\": container with ID starting with 7b3429729b692ad39c9937ac9d0c4b53327dd8f49ede72d3b7deab31142c905f not found: ID does not exist" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.491097 4763 scope.go:117] "RemoveContainer" containerID="9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53" Dec 06 08:58:57 crc kubenswrapper[4763]: E1206 08:58:57.491410 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53\": container with ID starting with 9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53 not found: ID does not exist" containerID="9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.491458 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53"} err="failed to get container status \"9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53\": rpc error: code = NotFound desc = could not find container \"9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53\": container with ID starting with 9b0ddb27d09ab9c4ed5bc601bd1cf57c57bda16c75964efe7222c24089d82e53 not found: ID does not exist" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.491485 4763 scope.go:117] "RemoveContainer" containerID="6219cad99461c62464287a4d315b5d6919f3b6ecfde76c1bfc8bd3a554e48177" Dec 06 08:58:57 crc kubenswrapper[4763]: E1206 08:58:57.491854 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6219cad99461c62464287a4d315b5d6919f3b6ecfde76c1bfc8bd3a554e48177\": container with ID starting with 6219cad99461c62464287a4d315b5d6919f3b6ecfde76c1bfc8bd3a554e48177 not found: ID does not exist" containerID="6219cad99461c62464287a4d315b5d6919f3b6ecfde76c1bfc8bd3a554e48177" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.491878 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6219cad99461c62464287a4d315b5d6919f3b6ecfde76c1bfc8bd3a554e48177"} err="failed to get container status \"6219cad99461c62464287a4d315b5d6919f3b6ecfde76c1bfc8bd3a554e48177\": rpc error: code = NotFound desc = could not find container \"6219cad99461c62464287a4d315b5d6919f3b6ecfde76c1bfc8bd3a554e48177\": container with ID starting with 6219cad99461c62464287a4d315b5d6919f3b6ecfde76c1bfc8bd3a554e48177 not found: ID does not exist" Dec 06 08:58:57 crc kubenswrapper[4763]: I1206 08:58:57.733226 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" path="/var/lib/kubelet/pods/1b3beb68-ef6e-425d-b1ff-9b74349a0ac6/volumes" Dec 06 08:59:06 crc kubenswrapper[4763]: I1206 08:59:06.219282 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:59:06 crc kubenswrapper[4763]: I1206 08:59:06.220095 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="prometheus" containerID="cri-o://026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468" 
gracePeriod=600 Dec 06 08:59:06 crc kubenswrapper[4763]: I1206 08:59:06.220606 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="thanos-sidecar" containerID="cri-o://6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd" gracePeriod=600 Dec 06 08:59:06 crc kubenswrapper[4763]: I1206 08:59:06.220668 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="config-reloader" containerID="cri-o://85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab" gracePeriod=600 Dec 06 08:59:06 crc kubenswrapper[4763]: I1206 08:59:06.429676 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="prometheus" probeResult="failure" output="Get \"https://10.217.0.130:9090/-/ready\": dial tcp 10.217.0.130:9090: connect: connection refused" Dec 06 08:59:06 crc kubenswrapper[4763]: I1206 08:59:06.491263 4763 generic.go:334] "Generic (PLEG): container finished" podID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerID="6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd" exitCode=0 Dec 06 08:59:06 crc kubenswrapper[4763]: I1206 08:59:06.491293 4763 generic.go:334] "Generic (PLEG): container finished" podID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerID="026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468" exitCode=0 Dec 06 08:59:06 crc kubenswrapper[4763]: I1206 08:59:06.491314 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e2a10389-6ee5-4381-b89b-b2ec5b3ab985","Type":"ContainerDied","Data":"6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd"} Dec 06 08:59:06 crc kubenswrapper[4763]: I1206 08:59:06.491338 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e2a10389-6ee5-4381-b89b-b2ec5b3ab985","Type":"ContainerDied","Data":"026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468"} Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.213141 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.270906 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.270968 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config\") pod \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.271008 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-secret-combined-ca-bundle\") pod \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.271037 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.271072 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.271105 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config-out\") pod \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.271127 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config\") pod \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.271164 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-tls-assets\") pod \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.271244 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcsmn\" (UniqueName: \"kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-kube-api-access-tcsmn\") pod \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.271265 4763 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-thanos-prometheus-http-client-file\") pod \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.271293 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-prometheus-metric-storage-rulefiles-0\") pod \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\" (UID: \"e2a10389-6ee5-4381-b89b-b2ec5b3ab985\") " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.272229 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "e2a10389-6ee5-4381-b89b-b2ec5b3ab985" (UID: "e2a10389-6ee5-4381-b89b-b2ec5b3ab985"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.282333 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config" (OuterVolumeSpecName: "config") pod "e2a10389-6ee5-4381-b89b-b2ec5b3ab985" (UID: "e2a10389-6ee5-4381-b89b-b2ec5b3ab985"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.282310 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config-out" (OuterVolumeSpecName: "config-out") pod "e2a10389-6ee5-4381-b89b-b2ec5b3ab985" (UID: "e2a10389-6ee5-4381-b89b-b2ec5b3ab985"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.283411 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "e2a10389-6ee5-4381-b89b-b2ec5b3ab985" (UID: "e2a10389-6ee5-4381-b89b-b2ec5b3ab985"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.283475 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "e2a10389-6ee5-4381-b89b-b2ec5b3ab985" (UID: "e2a10389-6ee5-4381-b89b-b2ec5b3ab985"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.283469 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "e2a10389-6ee5-4381-b89b-b2ec5b3ab985" (UID: "e2a10389-6ee5-4381-b89b-b2ec5b3ab985"). 
InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.283967 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "e2a10389-6ee5-4381-b89b-b2ec5b3ab985" (UID: "e2a10389-6ee5-4381-b89b-b2ec5b3ab985"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.285651 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "e2a10389-6ee5-4381-b89b-b2ec5b3ab985" (UID: "e2a10389-6ee5-4381-b89b-b2ec5b3ab985"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.299250 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-kube-api-access-tcsmn" (OuterVolumeSpecName: "kube-api-access-tcsmn") pod "e2a10389-6ee5-4381-b89b-b2ec5b3ab985" (UID: "e2a10389-6ee5-4381-b89b-b2ec5b3ab985"). InnerVolumeSpecName "kube-api-access-tcsmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.373308 4763 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.373346 4763 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.373361 4763 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.373372 4763 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.373384 4763 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-config-out\") on node \"crc\" DevicePath \"\"" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.373396 4763 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.373407 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcsmn\" (UniqueName: 
\"kubernetes.io/projected/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-kube-api-access-tcsmn\") on node \"crc\" DevicePath \"\"" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.373419 4763 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.373434 4763 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.385440 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config" (OuterVolumeSpecName: "web-config") pod "e2a10389-6ee5-4381-b89b-b2ec5b3ab985" (UID: "e2a10389-6ee5-4381-b89b-b2ec5b3ab985"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.386670 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "e2a10389-6ee5-4381-b89b-b2ec5b3ab985" (UID: "e2a10389-6ee5-4381-b89b-b2ec5b3ab985"). InnerVolumeSpecName "pvc-4183127b-ba41-4321-a678-4acbb0114b73". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.475487 4763 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") on node \"crc\" " Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.475532 4763 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e2a10389-6ee5-4381-b89b-b2ec5b3ab985-web-config\") on node \"crc\" DevicePath \"\"" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.503766 4763 generic.go:334] "Generic (PLEG): container finished" podID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerID="85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab" exitCode=0 Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.503810 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e2a10389-6ee5-4381-b89b-b2ec5b3ab985","Type":"ContainerDied","Data":"85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab"} Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.503828 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.503934 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"e2a10389-6ee5-4381-b89b-b2ec5b3ab985","Type":"ContainerDied","Data":"db7b747bbebe15ab6bf20c821e5dfdfeda9865d10cc435d9e7de52c8984ebdd5"} Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.503986 4763 scope.go:117] "RemoveContainer" containerID="6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.514631 4763 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.515097 4763 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-4183127b-ba41-4321-a678-4acbb0114b73" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73") on node "crc" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.526676 4763 scope.go:117] "RemoveContainer" containerID="85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.540845 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.550599 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.555965 4763 scope.go:117] "RemoveContainer" containerID="026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.577277 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.577591 4763 reconciler_common.go:293] "Volume detached for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") on node \"crc\" DevicePath \"\"" Dec 06 08:59:07 crc kubenswrapper[4763]: E1206 08:59:07.577788 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="init-config-reloader" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.577806 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="init-config-reloader" Dec 06 08:59:07 crc kubenswrapper[4763]: E1206 08:59:07.577824 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="config-reloader" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.577832 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="config-reloader" Dec 06 08:59:07 crc kubenswrapper[4763]: E1206 08:59:07.577870 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" containerName="extract-utilities" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.577879 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" containerName="extract-utilities" Dec 06 08:59:07 crc kubenswrapper[4763]: E1206 08:59:07.577914 4763 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" containerName="registry-server" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.577924 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" containerName="registry-server" Dec 06 08:59:07 crc kubenswrapper[4763]: E1206 08:59:07.577938 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" containerName="extract-content" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.577946 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" containerName="extract-content" Dec 06 08:59:07 crc kubenswrapper[4763]: E1206 08:59:07.577960 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="thanos-sidecar" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.577968 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="thanos-sidecar" Dec 06 08:59:07 crc kubenswrapper[4763]: E1206 08:59:07.577983 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="prometheus" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.577991 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="prometheus" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.578241 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="thanos-sidecar" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.578267 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b3beb68-ef6e-425d-b1ff-9b74349a0ac6" containerName="registry-server" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.578292 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="prometheus" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.578312 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" containerName="config-reloader" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.579035 4763 scope.go:117] "RemoveContainer" containerID="a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.580666 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.583991 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-b6l5c" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.583992 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.584064 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.583994 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.584648 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.591380 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.598368 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.600951 4763 scope.go:117] "RemoveContainer" containerID="6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd" Dec 06 08:59:07 crc kubenswrapper[4763]: E1206 08:59:07.619731 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd\": container with ID starting with 6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd not found: ID does not exist" containerID="6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.619785 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd"} err="failed to get container status \"6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd\": rpc error: code = NotFound desc = could not find container \"6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd\": container with ID starting with 6a6e7ebb081b5019366ef6276371ca8e12103e7a38f80de59c82429441f410cd not found: ID does not exist" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.619819 4763 scope.go:117] "RemoveContainer" containerID="85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab" Dec 06 08:59:07 crc kubenswrapper[4763]: E1206 08:59:07.620430 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab\": container with ID starting with 85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab not found: ID does not exist" containerID="85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.620476 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab"} err="failed to get container status 
\"85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab\": rpc error: code = NotFound desc = could not find container \"85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab\": container with ID starting with 85e5edeb6eefb69d4eede568b7ce3216ce654dc3d1e99e8e99b5792c8b7c5eab not found: ID does not exist" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.620509 4763 scope.go:117] "RemoveContainer" containerID="026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468" Dec 06 08:59:07 crc kubenswrapper[4763]: E1206 08:59:07.624814 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468\": container with ID starting with 026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468 not found: ID does not exist" containerID="026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.624851 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468"} err="failed to get container status \"026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468\": rpc error: code = NotFound desc = could not find container \"026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468\": container with ID starting with 026aacab5d38e1c4c3817556da1c61fee28029723fa932832e26005498440468 not found: ID does not exist" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.624875 4763 scope.go:117] "RemoveContainer" containerID="a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0" Dec 06 08:59:07 crc kubenswrapper[4763]: E1206 08:59:07.627291 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0\": container with ID starting with a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0 not found: ID does not exist" containerID="a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.627361 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0"} err="failed to get container status \"a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0\": rpc error: code = NotFound desc = could not find container \"a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0\": container with ID starting with a136df32776ae2dce997cb5c762ed49785ac16047b3a59596045dbac452df7a0 not found: ID does not exist" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.732850 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2a10389-6ee5-4381-b89b-b2ec5b3ab985" path="/var/lib/kubelet/pods/e2a10389-6ee5-4381-b89b-b2ec5b3ab985/volumes" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.781315 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.781387 4763 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.781421 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/08ba2fdd-9289-44be-a218-38d34272f2b4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.781487 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.781520 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/08ba2fdd-9289-44be-a218-38d34272f2b4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.781550 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.781588 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/08ba2fdd-9289-44be-a218-38d34272f2b4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.781620 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh6xn\" (UniqueName: \"kubernetes.io/projected/08ba2fdd-9289-44be-a218-38d34272f2b4-kube-api-access-fh6xn\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.781695 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.781756 4763 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-config\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.781809 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.884102 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-config\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.884224 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.884364 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.884398 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.884417 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/08ba2fdd-9289-44be-a218-38d34272f2b4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.884468 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.884502 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/08ba2fdd-9289-44be-a218-38d34272f2b4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: 
\"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.884525 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.884567 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/08ba2fdd-9289-44be-a218-38d34272f2b4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.884590 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh6xn\" (UniqueName: \"kubernetes.io/projected/08ba2fdd-9289-44be-a218-38d34272f2b4-kube-api-access-fh6xn\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.884664 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.887118 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/08ba2fdd-9289-44be-a218-38d34272f2b4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.890500 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.891028 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.891285 4763 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.891306 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.891319 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b015cf8d820501a423550d1415408204b77e53ec4d768da3cd0e5c2a5ce9ba08/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.891680 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.892198 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/08ba2fdd-9289-44be-a218-38d34272f2b4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.895518 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/08ba2fdd-9289-44be-a218-38d34272f2b4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.903754 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.906174 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/08ba2fdd-9289-44be-a218-38d34272f2b4-config\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.907743 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh6xn\" (UniqueName: \"kubernetes.io/projected/08ba2fdd-9289-44be-a218-38d34272f2b4-kube-api-access-fh6xn\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:07 crc kubenswrapper[4763]: I1206 08:59:07.950967 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4183127b-ba41-4321-a678-4acbb0114b73\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4183127b-ba41-4321-a678-4acbb0114b73\") pod \"prometheus-metric-storage-0\" (UID: \"08ba2fdd-9289-44be-a218-38d34272f2b4\") " pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:08 crc kubenswrapper[4763]: I1206 08:59:08.250848 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:08 crc kubenswrapper[4763]: I1206 08:59:08.784093 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 06 08:59:09 crc kubenswrapper[4763]: I1206 08:59:09.527493 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"08ba2fdd-9289-44be-a218-38d34272f2b4","Type":"ContainerStarted","Data":"db80907582cae9a117804d8546336ced6b759f789e2887efe8246b701341519b"} Dec 06 08:59:12 crc kubenswrapper[4763]: I1206 08:59:12.556141 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"08ba2fdd-9289-44be-a218-38d34272f2b4","Type":"ContainerStarted","Data":"bceb4258df8ebe2592ff7d723f4229da72dffd0984431898afddf0428c2c61a0"} Dec 06 08:59:20 crc kubenswrapper[4763]: I1206 08:59:20.635712 4763 generic.go:334] "Generic (PLEG): container finished" podID="08ba2fdd-9289-44be-a218-38d34272f2b4" containerID="bceb4258df8ebe2592ff7d723f4229da72dffd0984431898afddf0428c2c61a0" exitCode=0 Dec 06 08:59:20 crc kubenswrapper[4763]: I1206 08:59:20.635833 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"08ba2fdd-9289-44be-a218-38d34272f2b4","Type":"ContainerDied","Data":"bceb4258df8ebe2592ff7d723f4229da72dffd0984431898afddf0428c2c61a0"} Dec 06 08:59:21 crc kubenswrapper[4763]: I1206 08:59:21.656981 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"08ba2fdd-9289-44be-a218-38d34272f2b4","Type":"ContainerStarted","Data":"432afd19f67025e1e29e3797ed13a15d236d23185fdcd0b00a86e52099b76e9e"} Dec 06 08:59:24 crc kubenswrapper[4763]: I1206 08:59:24.684793 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"08ba2fdd-9289-44be-a218-38d34272f2b4","Type":"ContainerStarted","Data":"fd90b5c0baa515a8290c5c1b42644b81012d6bc4fdcbcb2053e9914e75e270fc"} Dec 06 08:59:24 crc kubenswrapper[4763]: I1206 08:59:24.685104 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"08ba2fdd-9289-44be-a218-38d34272f2b4","Type":"ContainerStarted","Data":"68e17bfa6d1437da7813a86845f48a3ff1ae68c01a49035f584442d2da096c34"} Dec 06 08:59:24 crc kubenswrapper[4763]: I1206 08:59:24.716679 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=17.716651651 podStartE2EDuration="17.716651651s" podCreationTimestamp="2025-12-06 08:59:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 08:59:24.704647606 +0000 UTC m=+2847.280352654" watchObservedRunningTime="2025-12-06 08:59:24.716651651 +0000 UTC m=+2847.292356699" Dec 06 08:59:28 crc kubenswrapper[4763]: I1206 08:59:28.252531 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:38 crc kubenswrapper[4763]: I1206 08:59:38.252109 4763 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:38 crc kubenswrapper[4763]: I1206 08:59:38.258064 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 06 08:59:38 crc kubenswrapper[4763]: I1206 08:59:38.818504 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.139548 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9"] Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.141578 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.143654 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.145478 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.152207 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9"] Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.189788 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98xnh\" (UniqueName: \"kubernetes.io/projected/e936afc1-13a4-430a-be88-db8d932387e1-kube-api-access-98xnh\") pod \"collect-profiles-29416860-hcfb9\" (UID: \"e936afc1-13a4-430a-be88-db8d932387e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.189929 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e936afc1-13a4-430a-be88-db8d932387e1-secret-volume\") pod \"collect-profiles-29416860-hcfb9\" (UID: \"e936afc1-13a4-430a-be88-db8d932387e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.189970 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e936afc1-13a4-430a-be88-db8d932387e1-config-volume\") pod \"collect-profiles-29416860-hcfb9\" (UID: \"e936afc1-13a4-430a-be88-db8d932387e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.292493 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98xnh\" (UniqueName: \"kubernetes.io/projected/e936afc1-13a4-430a-be88-db8d932387e1-kube-api-access-98xnh\") pod \"collect-profiles-29416860-hcfb9\" (UID: \"e936afc1-13a4-430a-be88-db8d932387e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.292598 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e936afc1-13a4-430a-be88-db8d932387e1-secret-volume\") pod \"collect-profiles-29416860-hcfb9\" (UID: 
\"e936afc1-13a4-430a-be88-db8d932387e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.292638 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e936afc1-13a4-430a-be88-db8d932387e1-config-volume\") pod \"collect-profiles-29416860-hcfb9\" (UID: \"e936afc1-13a4-430a-be88-db8d932387e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.293701 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e936afc1-13a4-430a-be88-db8d932387e1-config-volume\") pod \"collect-profiles-29416860-hcfb9\" (UID: \"e936afc1-13a4-430a-be88-db8d932387e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.308684 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e936afc1-13a4-430a-be88-db8d932387e1-secret-volume\") pod \"collect-profiles-29416860-hcfb9\" (UID: \"e936afc1-13a4-430a-be88-db8d932387e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.316585 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98xnh\" (UniqueName: \"kubernetes.io/projected/e936afc1-13a4-430a-be88-db8d932387e1-kube-api-access-98xnh\") pod \"collect-profiles-29416860-hcfb9\" (UID: \"e936afc1-13a4-430a-be88-db8d932387e1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.470815 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:00 crc kubenswrapper[4763]: I1206 09:00:00.958945 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9"] Dec 06 09:00:01 crc kubenswrapper[4763]: I1206 09:00:01.017672 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" event={"ID":"e936afc1-13a4-430a-be88-db8d932387e1","Type":"ContainerStarted","Data":"8fd4637e147eb2e8622112435e712bac26b8039bbd00bd57b75c8c7d0d091f6c"} Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.030197 4763 generic.go:334] "Generic (PLEG): container finished" podID="e936afc1-13a4-430a-be88-db8d932387e1" containerID="ccecd7a313719e422608b01311571225d0f7e554677fbb7c1ebfae0bcdacbc0c" exitCode=0 Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.030267 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" event={"ID":"e936afc1-13a4-430a-be88-db8d932387e1","Type":"ContainerDied","Data":"ccecd7a313719e422608b01311571225d0f7e554677fbb7c1ebfae0bcdacbc0c"} Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.569942 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.574237 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.576280 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.582847 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-htlmh" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.583716 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.584301 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.616964 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.641376 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.641437 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.641467 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.641538 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9ppw\" (UniqueName: \"kubernetes.io/projected/11d2c295-2754-410c-bda4-4830b20b5ee8-kube-api-access-c9ppw\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.641745 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.641819 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.641928 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.641956 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-config-data\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.642002 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.743637 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.743698 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.743723 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.743811 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9ppw\" (UniqueName: \"kubernetes.io/projected/11d2c295-2754-410c-bda4-4830b20b5ee8-kube-api-access-c9ppw\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.743940 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.743966 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.744014 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ssh-key\") pod 
\"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.744030 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-config-data\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.744124 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.744159 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.744258 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.745197 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.745467 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.745938 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-config-data\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.750126 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.751104 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.755866 4763 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.768064 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9ppw\" (UniqueName: \"kubernetes.io/projected/11d2c295-2754-410c-bda4-4830b20b5ee8-kube-api-access-c9ppw\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.778512 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " pod="openstack/tempest-tests-tempest" Dec 06 09:00:02 crc kubenswrapper[4763]: I1206 09:00:02.903356 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.342353 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.356404 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.394736 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.457849 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e936afc1-13a4-430a-be88-db8d932387e1-secret-volume\") pod \"e936afc1-13a4-430a-be88-db8d932387e1\" (UID: \"e936afc1-13a4-430a-be88-db8d932387e1\") " Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.457926 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e936afc1-13a4-430a-be88-db8d932387e1-config-volume\") pod \"e936afc1-13a4-430a-be88-db8d932387e1\" (UID: \"e936afc1-13a4-430a-be88-db8d932387e1\") " Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.457956 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98xnh\" (UniqueName: \"kubernetes.io/projected/e936afc1-13a4-430a-be88-db8d932387e1-kube-api-access-98xnh\") pod \"e936afc1-13a4-430a-be88-db8d932387e1\" (UID: \"e936afc1-13a4-430a-be88-db8d932387e1\") " Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.460066 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e936afc1-13a4-430a-be88-db8d932387e1-config-volume" (OuterVolumeSpecName: "config-volume") pod "e936afc1-13a4-430a-be88-db8d932387e1" (UID: "e936afc1-13a4-430a-be88-db8d932387e1"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.463925 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e936afc1-13a4-430a-be88-db8d932387e1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e936afc1-13a4-430a-be88-db8d932387e1" (UID: "e936afc1-13a4-430a-be88-db8d932387e1"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.470784 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e936afc1-13a4-430a-be88-db8d932387e1-kube-api-access-98xnh" (OuterVolumeSpecName: "kube-api-access-98xnh") pod "e936afc1-13a4-430a-be88-db8d932387e1" (UID: "e936afc1-13a4-430a-be88-db8d932387e1"). InnerVolumeSpecName "kube-api-access-98xnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.559291 4763 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e936afc1-13a4-430a-be88-db8d932387e1-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.559327 4763 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e936afc1-13a4-430a-be88-db8d932387e1-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 09:00:03 crc kubenswrapper[4763]: I1206 09:00:03.559337 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98xnh\" (UniqueName: \"kubernetes.io/projected/e936afc1-13a4-430a-be88-db8d932387e1-kube-api-access-98xnh\") on node \"crc\" DevicePath \"\"" Dec 06 09:00:04 crc kubenswrapper[4763]: I1206 09:00:04.050194 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" event={"ID":"e936afc1-13a4-430a-be88-db8d932387e1","Type":"ContainerDied","Data":"8fd4637e147eb2e8622112435e712bac26b8039bbd00bd57b75c8c7d0d091f6c"} Dec 06 09:00:04 crc kubenswrapper[4763]: I1206 09:00:04.050536 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8fd4637e147eb2e8622112435e712bac26b8039bbd00bd57b75c8c7d0d091f6c" Dec 06 09:00:04 crc kubenswrapper[4763]: I1206 09:00:04.050219 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9" Dec 06 09:00:04 crc kubenswrapper[4763]: I1206 09:00:04.052240 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"11d2c295-2754-410c-bda4-4830b20b5ee8","Type":"ContainerStarted","Data":"5be96821446e609f13a3ae0368f52d2f4931920eedf06e160bd3098c9b541bf9"} Dec 06 09:00:04 crc kubenswrapper[4763]: I1206 09:00:04.481722 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw"] Dec 06 09:00:04 crc kubenswrapper[4763]: I1206 09:00:04.494217 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416815-d9cnw"] Dec 06 09:00:06 crc kubenswrapper[4763]: I1206 09:00:06.195677 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="459c6292-1876-41a5-9546-9a87fd104b1a" path="/var/lib/kubelet/pods/459c6292-1876-41a5-9546-9a87fd104b1a/volumes" Dec 06 09:00:12 crc kubenswrapper[4763]: I1206 09:00:12.537133 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:00:12 crc kubenswrapper[4763]: I1206 09:00:12.537933 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:00:16 crc kubenswrapper[4763]: I1206 09:00:16.320173 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"11d2c295-2754-410c-bda4-4830b20b5ee8","Type":"ContainerStarted","Data":"4986c74c789840e1eac796dadbd387656e0280867ab7df55561200eb2b1c4995"} Dec 06 09:00:16 crc kubenswrapper[4763]: I1206 09:00:16.351088 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.535738124 podStartE2EDuration="15.351069267s" podCreationTimestamp="2025-12-06 09:00:01 +0000 UTC" firstStartedPulling="2025-12-06 09:00:03.356123161 +0000 UTC m=+2885.931828199" lastFinishedPulling="2025-12-06 09:00:15.171454314 +0000 UTC m=+2897.747159342" observedRunningTime="2025-12-06 09:00:16.346989796 +0000 UTC m=+2898.922694834" watchObservedRunningTime="2025-12-06 09:00:16.351069267 +0000 UTC m=+2898.926774305" Dec 06 09:00:24 crc kubenswrapper[4763]: I1206 09:00:24.095839 4763 scope.go:117] "RemoveContainer" containerID="2abd3b0852dea0756d020e4b9edb87b25fcdf770433c0fb4e835e6fb65d8d0b4" Dec 06 09:00:42 crc kubenswrapper[4763]: I1206 09:00:42.537118 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:00:42 crc kubenswrapper[4763]: I1206 09:00:42.537758 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:00:44 crc kubenswrapper[4763]: I1206 09:00:44.929920 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bsjlm"] Dec 06 09:00:44 crc kubenswrapper[4763]: E1206 09:00:44.931194 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e936afc1-13a4-430a-be88-db8d932387e1" containerName="collect-profiles" Dec 06 09:00:44 crc kubenswrapper[4763]: I1206 09:00:44.931217 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="e936afc1-13a4-430a-be88-db8d932387e1" containerName="collect-profiles" Dec 06 09:00:44 crc kubenswrapper[4763]: I1206 09:00:44.931615 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="e936afc1-13a4-430a-be88-db8d932387e1" containerName="collect-profiles" Dec 06 09:00:44 crc kubenswrapper[4763]: I1206 09:00:44.933717 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:44 crc kubenswrapper[4763]: I1206 09:00:44.943505 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bsjlm"] Dec 06 09:00:45 crc kubenswrapper[4763]: I1206 09:00:45.086046 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd95f\" (UniqueName: \"kubernetes.io/projected/493fe206-88b1-4be8-b695-9ae130943f97-kube-api-access-zd95f\") pod \"redhat-marketplace-bsjlm\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:45 crc kubenswrapper[4763]: I1206 09:00:45.086097 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-utilities\") pod \"redhat-marketplace-bsjlm\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:45 crc kubenswrapper[4763]: I1206 09:00:45.086686 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-catalog-content\") pod \"redhat-marketplace-bsjlm\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:45 crc kubenswrapper[4763]: I1206 09:00:45.189350 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd95f\" (UniqueName: \"kubernetes.io/projected/493fe206-88b1-4be8-b695-9ae130943f97-kube-api-access-zd95f\") pod \"redhat-marketplace-bsjlm\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:45 crc kubenswrapper[4763]: I1206 09:00:45.189416 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-utilities\") pod \"redhat-marketplace-bsjlm\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:45 crc kubenswrapper[4763]: I1206 09:00:45.189508 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-catalog-content\") pod 
\"redhat-marketplace-bsjlm\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:45 crc kubenswrapper[4763]: I1206 09:00:45.190149 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-catalog-content\") pod \"redhat-marketplace-bsjlm\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:45 crc kubenswrapper[4763]: I1206 09:00:45.190345 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-utilities\") pod \"redhat-marketplace-bsjlm\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:45 crc kubenswrapper[4763]: I1206 09:00:45.213025 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd95f\" (UniqueName: \"kubernetes.io/projected/493fe206-88b1-4be8-b695-9ae130943f97-kube-api-access-zd95f\") pod \"redhat-marketplace-bsjlm\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:45 crc kubenswrapper[4763]: I1206 09:00:45.306426 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:45 crc kubenswrapper[4763]: I1206 09:00:45.837761 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bsjlm"] Dec 06 09:00:46 crc kubenswrapper[4763]: I1206 09:00:46.602455 4763 generic.go:334] "Generic (PLEG): container finished" podID="493fe206-88b1-4be8-b695-9ae130943f97" containerID="10a6946b2c83cc5c322dfa3e928d79c706c695fc542eaaebb590f10f953de762" exitCode=0 Dec 06 09:00:46 crc kubenswrapper[4763]: I1206 09:00:46.602532 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsjlm" event={"ID":"493fe206-88b1-4be8-b695-9ae130943f97","Type":"ContainerDied","Data":"10a6946b2c83cc5c322dfa3e928d79c706c695fc542eaaebb590f10f953de762"} Dec 06 09:00:46 crc kubenswrapper[4763]: I1206 09:00:46.602775 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsjlm" event={"ID":"493fe206-88b1-4be8-b695-9ae130943f97","Type":"ContainerStarted","Data":"b1518a027e7d53e572e82d8c545635919f454198a944280d430dee6ac9fd24b8"} Dec 06 09:00:47 crc kubenswrapper[4763]: I1206 09:00:47.611846 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsjlm" event={"ID":"493fe206-88b1-4be8-b695-9ae130943f97","Type":"ContainerStarted","Data":"a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a"} Dec 06 09:00:48 crc kubenswrapper[4763]: I1206 09:00:48.623181 4763 generic.go:334] "Generic (PLEG): container finished" podID="493fe206-88b1-4be8-b695-9ae130943f97" containerID="a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a" exitCode=0 Dec 06 09:00:48 crc kubenswrapper[4763]: I1206 09:00:48.623222 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsjlm" event={"ID":"493fe206-88b1-4be8-b695-9ae130943f97","Type":"ContainerDied","Data":"a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a"} Dec 06 09:00:49 crc kubenswrapper[4763]: I1206 09:00:49.636673 4763 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsjlm" event={"ID":"493fe206-88b1-4be8-b695-9ae130943f97","Type":"ContainerStarted","Data":"259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd"} Dec 06 09:00:49 crc kubenswrapper[4763]: I1206 09:00:49.661057 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bsjlm" podStartSLOduration=3.280183939 podStartE2EDuration="5.661036614s" podCreationTimestamp="2025-12-06 09:00:44 +0000 UTC" firstStartedPulling="2025-12-06 09:00:46.605822989 +0000 UTC m=+2929.181528027" lastFinishedPulling="2025-12-06 09:00:48.986675664 +0000 UTC m=+2931.562380702" observedRunningTime="2025-12-06 09:00:49.65424205 +0000 UTC m=+2932.229947118" watchObservedRunningTime="2025-12-06 09:00:49.661036614 +0000 UTC m=+2932.236741652" Dec 06 09:00:55 crc kubenswrapper[4763]: I1206 09:00:55.307214 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:55 crc kubenswrapper[4763]: I1206 09:00:55.307673 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:55 crc kubenswrapper[4763]: I1206 09:00:55.364058 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:55 crc kubenswrapper[4763]: I1206 09:00:55.743988 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:55 crc kubenswrapper[4763]: I1206 09:00:55.804211 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bsjlm"] Dec 06 09:00:57 crc kubenswrapper[4763]: I1206 09:00:57.712667 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bsjlm" podUID="493fe206-88b1-4be8-b695-9ae130943f97" containerName="registry-server" containerID="cri-o://259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd" gracePeriod=2 Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.258567 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.368266 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zd95f\" (UniqueName: \"kubernetes.io/projected/493fe206-88b1-4be8-b695-9ae130943f97-kube-api-access-zd95f\") pod \"493fe206-88b1-4be8-b695-9ae130943f97\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.368547 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-utilities\") pod \"493fe206-88b1-4be8-b695-9ae130943f97\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.368599 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-catalog-content\") pod \"493fe206-88b1-4be8-b695-9ae130943f97\" (UID: \"493fe206-88b1-4be8-b695-9ae130943f97\") " Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.369013 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-utilities" (OuterVolumeSpecName: "utilities") pod "493fe206-88b1-4be8-b695-9ae130943f97" (UID: "493fe206-88b1-4be8-b695-9ae130943f97"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.369203 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.380281 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/493fe206-88b1-4be8-b695-9ae130943f97-kube-api-access-zd95f" (OuterVolumeSpecName: "kube-api-access-zd95f") pod "493fe206-88b1-4be8-b695-9ae130943f97" (UID: "493fe206-88b1-4be8-b695-9ae130943f97"). InnerVolumeSpecName "kube-api-access-zd95f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.406073 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "493fe206-88b1-4be8-b695-9ae130943f97" (UID: "493fe206-88b1-4be8-b695-9ae130943f97"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.470977 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/493fe206-88b1-4be8-b695-9ae130943f97-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.471011 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zd95f\" (UniqueName: \"kubernetes.io/projected/493fe206-88b1-4be8-b695-9ae130943f97-kube-api-access-zd95f\") on node \"crc\" DevicePath \"\"" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.726591 4763 generic.go:334] "Generic (PLEG): container finished" podID="493fe206-88b1-4be8-b695-9ae130943f97" containerID="259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd" exitCode=0 Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.726667 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsjlm" event={"ID":"493fe206-88b1-4be8-b695-9ae130943f97","Type":"ContainerDied","Data":"259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd"} Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.726711 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsjlm" event={"ID":"493fe206-88b1-4be8-b695-9ae130943f97","Type":"ContainerDied","Data":"b1518a027e7d53e572e82d8c545635919f454198a944280d430dee6ac9fd24b8"} Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.726750 4763 scope.go:117] "RemoveContainer" containerID="259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.726997 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bsjlm" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.760696 4763 scope.go:117] "RemoveContainer" containerID="a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.787601 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bsjlm"] Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.803673 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bsjlm"] Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.823501 4763 scope.go:117] "RemoveContainer" containerID="10a6946b2c83cc5c322dfa3e928d79c706c695fc542eaaebb590f10f953de762" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.866195 4763 scope.go:117] "RemoveContainer" containerID="259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd" Dec 06 09:00:58 crc kubenswrapper[4763]: E1206 09:00:58.867119 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd\": container with ID starting with 259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd not found: ID does not exist" containerID="259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.867195 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd"} err="failed to get container status \"259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd\": rpc error: code = NotFound desc = could not find container \"259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd\": container with ID starting with 259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd not found: ID does not exist" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.867280 4763 scope.go:117] "RemoveContainer" containerID="a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a" Dec 06 09:00:58 crc kubenswrapper[4763]: E1206 09:00:58.868224 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a\": container with ID starting with a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a not found: ID does not exist" containerID="a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.868267 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a"} err="failed to get container status \"a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a\": rpc error: code = NotFound desc = could not find container \"a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a\": container with ID starting with a3c23ebf5c5d281d68598d4a86b602ca06a0e02ce15d558c545b25c3a344450a not found: ID does not exist" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.868378 4763 scope.go:117] "RemoveContainer" containerID="10a6946b2c83cc5c322dfa3e928d79c706c695fc542eaaebb590f10f953de762" Dec 06 09:00:58 crc kubenswrapper[4763]: E1206 09:00:58.868877 4763 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"10a6946b2c83cc5c322dfa3e928d79c706c695fc542eaaebb590f10f953de762\": container with ID starting with 10a6946b2c83cc5c322dfa3e928d79c706c695fc542eaaebb590f10f953de762 not found: ID does not exist" containerID="10a6946b2c83cc5c322dfa3e928d79c706c695fc542eaaebb590f10f953de762" Dec 06 09:00:58 crc kubenswrapper[4763]: I1206 09:00:58.868939 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10a6946b2c83cc5c322dfa3e928d79c706c695fc542eaaebb590f10f953de762"} err="failed to get container status \"10a6946b2c83cc5c322dfa3e928d79c706c695fc542eaaebb590f10f953de762\": rpc error: code = NotFound desc = could not find container \"10a6946b2c83cc5c322dfa3e928d79c706c695fc542eaaebb590f10f953de762\": container with ID starting with 10a6946b2c83cc5c322dfa3e928d79c706c695fc542eaaebb590f10f953de762 not found: ID does not exist" Dec 06 09:00:59 crc kubenswrapper[4763]: I1206 09:00:59.735726 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="493fe206-88b1-4be8-b695-9ae130943f97" path="/var/lib/kubelet/pods/493fe206-88b1-4be8-b695-9ae130943f97/volumes" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.147584 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29416861-n4vcz"] Dec 06 09:01:00 crc kubenswrapper[4763]: E1206 09:01:00.148035 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="493fe206-88b1-4be8-b695-9ae130943f97" containerName="extract-utilities" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.148051 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="493fe206-88b1-4be8-b695-9ae130943f97" containerName="extract-utilities" Dec 06 09:01:00 crc kubenswrapper[4763]: E1206 09:01:00.148069 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="493fe206-88b1-4be8-b695-9ae130943f97" containerName="registry-server" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.148075 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="493fe206-88b1-4be8-b695-9ae130943f97" containerName="registry-server" Dec 06 09:01:00 crc kubenswrapper[4763]: E1206 09:01:00.148097 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="493fe206-88b1-4be8-b695-9ae130943f97" containerName="extract-content" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.148103 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="493fe206-88b1-4be8-b695-9ae130943f97" containerName="extract-content" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.148314 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="493fe206-88b1-4be8-b695-9ae130943f97" containerName="registry-server" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.149087 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.156201 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29416861-n4vcz"] Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.341818 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-combined-ca-bundle\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.341870 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-fernet-keys\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.341934 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf6md\" (UniqueName: \"kubernetes.io/projected/9a670801-b117-4ea7-b37b-b28dd79aa1c1-kube-api-access-xf6md\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.342010 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-config-data\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.444103 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-combined-ca-bundle\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.444172 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-fernet-keys\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.444256 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf6md\" (UniqueName: \"kubernetes.io/projected/9a670801-b117-4ea7-b37b-b28dd79aa1c1-kube-api-access-xf6md\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.444307 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-config-data\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.450357 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-config-data\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.450814 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-combined-ca-bundle\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.453387 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-fernet-keys\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.472120 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf6md\" (UniqueName: \"kubernetes.io/projected/9a670801-b117-4ea7-b37b-b28dd79aa1c1-kube-api-access-xf6md\") pod \"keystone-cron-29416861-n4vcz\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.506452 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:00 crc kubenswrapper[4763]: I1206 09:01:00.942222 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29416861-n4vcz"] Dec 06 09:01:01 crc kubenswrapper[4763]: I1206 09:01:01.759413 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29416861-n4vcz" event={"ID":"9a670801-b117-4ea7-b37b-b28dd79aa1c1","Type":"ContainerStarted","Data":"e4211d3ab311ce885ac5620accbaa6cdae6bc15361eea6d2fbebc22660243427"} Dec 06 09:01:01 crc kubenswrapper[4763]: I1206 09:01:01.759731 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29416861-n4vcz" event={"ID":"9a670801-b117-4ea7-b37b-b28dd79aa1c1","Type":"ContainerStarted","Data":"939d8d1a9dd530c41444317ffeebd488d69e1d367c8904919f8c5b0d51903c0d"} Dec 06 09:01:01 crc kubenswrapper[4763]: I1206 09:01:01.778732 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29416861-n4vcz" podStartSLOduration=1.77870917 podStartE2EDuration="1.77870917s" podCreationTimestamp="2025-12-06 09:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 09:01:01.776724307 +0000 UTC m=+2944.352429345" watchObservedRunningTime="2025-12-06 09:01:01.77870917 +0000 UTC m=+2944.354414208" Dec 06 09:01:03 crc kubenswrapper[4763]: E1206 09:01:03.787381 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-conmon-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": 
RecentStats: unable to find data in memory cache]" Dec 06 09:01:04 crc kubenswrapper[4763]: I1206 09:01:04.790992 4763 generic.go:334] "Generic (PLEG): container finished" podID="9a670801-b117-4ea7-b37b-b28dd79aa1c1" containerID="e4211d3ab311ce885ac5620accbaa6cdae6bc15361eea6d2fbebc22660243427" exitCode=0 Dec 06 09:01:04 crc kubenswrapper[4763]: I1206 09:01:04.791091 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29416861-n4vcz" event={"ID":"9a670801-b117-4ea7-b37b-b28dd79aa1c1","Type":"ContainerDied","Data":"e4211d3ab311ce885ac5620accbaa6cdae6bc15361eea6d2fbebc22660243427"} Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.376149 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.490572 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-fernet-keys\") pod \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.490700 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xf6md\" (UniqueName: \"kubernetes.io/projected/9a670801-b117-4ea7-b37b-b28dd79aa1c1-kube-api-access-xf6md\") pod \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.490802 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-combined-ca-bundle\") pod \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.490879 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-config-data\") pod \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\" (UID: \"9a670801-b117-4ea7-b37b-b28dd79aa1c1\") " Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.497374 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9a670801-b117-4ea7-b37b-b28dd79aa1c1" (UID: "9a670801-b117-4ea7-b37b-b28dd79aa1c1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.497819 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a670801-b117-4ea7-b37b-b28dd79aa1c1-kube-api-access-xf6md" (OuterVolumeSpecName: "kube-api-access-xf6md") pod "9a670801-b117-4ea7-b37b-b28dd79aa1c1" (UID: "9a670801-b117-4ea7-b37b-b28dd79aa1c1"). InnerVolumeSpecName "kube-api-access-xf6md". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.539861 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a670801-b117-4ea7-b37b-b28dd79aa1c1" (UID: "9a670801-b117-4ea7-b37b-b28dd79aa1c1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.561343 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-config-data" (OuterVolumeSpecName: "config-data") pod "9a670801-b117-4ea7-b37b-b28dd79aa1c1" (UID: "9a670801-b117-4ea7-b37b-b28dd79aa1c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.594155 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.594203 4763 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.594218 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xf6md\" (UniqueName: \"kubernetes.io/projected/9a670801-b117-4ea7-b37b-b28dd79aa1c1-kube-api-access-xf6md\") on node \"crc\" DevicePath \"\"" Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.594232 4763 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a670801-b117-4ea7-b37b-b28dd79aa1c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.811593 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29416861-n4vcz" event={"ID":"9a670801-b117-4ea7-b37b-b28dd79aa1c1","Type":"ContainerDied","Data":"939d8d1a9dd530c41444317ffeebd488d69e1d367c8904919f8c5b0d51903c0d"} Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.811635 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="939d8d1a9dd530c41444317ffeebd488d69e1d367c8904919f8c5b0d51903c0d" Dec 06 09:01:06 crc kubenswrapper[4763]: I1206 09:01:06.811710 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29416861-n4vcz" Dec 06 09:01:12 crc kubenswrapper[4763]: I1206 09:01:12.536614 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:01:12 crc kubenswrapper[4763]: I1206 09:01:12.537514 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:01:12 crc kubenswrapper[4763]: I1206 09:01:12.537595 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 09:01:12 crc kubenswrapper[4763]: I1206 09:01:12.538786 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 09:01:12 crc kubenswrapper[4763]: I1206 09:01:12.539120 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" gracePeriod=600 Dec 06 09:01:12 crc kubenswrapper[4763]: E1206 09:01:12.675807 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:01:12 crc kubenswrapper[4763]: I1206 09:01:12.866841 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" exitCode=0 Dec 06 09:01:12 crc kubenswrapper[4763]: I1206 09:01:12.866932 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3"} Dec 06 09:01:12 crc kubenswrapper[4763]: I1206 09:01:12.866992 4763 scope.go:117] "RemoveContainer" containerID="f2fa2c2455ec78318208b1caf4309598a65f3e11298ae4bb560de6abc47258a9" Dec 06 09:01:12 crc kubenswrapper[4763]: I1206 09:01:12.867880 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:01:12 crc kubenswrapper[4763]: E1206 09:01:12.868191 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:01:14 crc kubenswrapper[4763]: E1206 09:01:14.032229 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-conmon-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": RecentStats: unable to find data in memory cache]" Dec 06 09:01:24 crc kubenswrapper[4763]: E1206 09:01:24.359653 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-conmon-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": RecentStats: unable to find data in memory cache]" Dec 06 09:01:25 crc kubenswrapper[4763]: I1206 09:01:25.721041 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:01:25 crc kubenswrapper[4763]: E1206 09:01:25.723156 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:01:34 crc kubenswrapper[4763]: E1206 09:01:34.626174 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-conmon-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": RecentStats: unable to find data in memory cache]" Dec 06 09:01:39 crc kubenswrapper[4763]: I1206 09:01:39.720791 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:01:39 crc kubenswrapper[4763]: E1206 09:01:39.721579 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" 
podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:01:44 crc kubenswrapper[4763]: E1206 09:01:44.885156 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-conmon-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": RecentStats: unable to find data in memory cache]" Dec 06 09:01:52 crc kubenswrapper[4763]: I1206 09:01:52.720341 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:01:52 crc kubenswrapper[4763]: E1206 09:01:52.721095 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:01:55 crc kubenswrapper[4763]: E1206 09:01:55.162412 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-conmon-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod493fe206_88b1_4be8_b695_9ae130943f97.slice/crio-259f141ef91459d1f2a2fcd0bbceb5808dea3f016c84720edb15b43919413ecd.scope\": RecentStats: unable to find data in memory cache]" Dec 06 09:02:07 crc kubenswrapper[4763]: I1206 09:02:07.726911 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:02:07 crc kubenswrapper[4763]: E1206 09:02:07.727692 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:02:20 crc kubenswrapper[4763]: I1206 09:02:20.719607 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:02:20 crc kubenswrapper[4763]: E1206 09:02:20.720368 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:02:31 crc kubenswrapper[4763]: I1206 09:02:31.720980 4763 scope.go:117] "RemoveContainer" 
containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:02:31 crc kubenswrapper[4763]: E1206 09:02:31.721731 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:02:42 crc kubenswrapper[4763]: I1206 09:02:42.719893 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:02:42 crc kubenswrapper[4763]: E1206 09:02:42.720652 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:02:56 crc kubenswrapper[4763]: I1206 09:02:56.719466 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:02:56 crc kubenswrapper[4763]: E1206 09:02:56.720495 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:03:08 crc kubenswrapper[4763]: I1206 09:03:08.720053 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:03:08 crc kubenswrapper[4763]: E1206 09:03:08.720780 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:03:23 crc kubenswrapper[4763]: I1206 09:03:23.719702 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:03:23 crc kubenswrapper[4763]: E1206 09:03:23.720488 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:03:37 crc kubenswrapper[4763]: I1206 09:03:37.726054 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:03:37 crc kubenswrapper[4763]: E1206 09:03:37.726819 4763 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:03:48 crc kubenswrapper[4763]: I1206 09:03:48.720079 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:03:48 crc kubenswrapper[4763]: E1206 09:03:48.720981 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:04:00 crc kubenswrapper[4763]: I1206 09:04:00.719829 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:04:00 crc kubenswrapper[4763]: E1206 09:04:00.720603 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:04:14 crc kubenswrapper[4763]: I1206 09:04:14.719863 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:04:14 crc kubenswrapper[4763]: E1206 09:04:14.720688 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:04:25 crc kubenswrapper[4763]: I1206 09:04:25.719752 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:04:25 crc kubenswrapper[4763]: E1206 09:04:25.720995 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:04:39 crc kubenswrapper[4763]: I1206 09:04:39.719653 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:04:39 crc kubenswrapper[4763]: E1206 09:04:39.720432 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:04:52 crc kubenswrapper[4763]: I1206 09:04:52.720616 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:04:52 crc kubenswrapper[4763]: E1206 09:04:52.722218 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:05:04 crc kubenswrapper[4763]: I1206 09:05:04.719530 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:05:04 crc kubenswrapper[4763]: E1206 09:05:04.720400 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:05:17 crc kubenswrapper[4763]: I1206 09:05:17.726268 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:05:17 crc kubenswrapper[4763]: E1206 09:05:17.727091 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:05:30 crc kubenswrapper[4763]: I1206 09:05:30.719614 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:05:30 crc kubenswrapper[4763]: E1206 09:05:30.720641 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:05:41 crc kubenswrapper[4763]: I1206 09:05:41.719983 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:05:41 crc kubenswrapper[4763]: E1206 09:05:41.720969 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" 
podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.708965 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fkdz7"] Dec 06 09:05:52 crc kubenswrapper[4763]: E1206 09:05:52.710134 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a670801-b117-4ea7-b37b-b28dd79aa1c1" containerName="keystone-cron" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.710154 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a670801-b117-4ea7-b37b-b28dd79aa1c1" containerName="keystone-cron" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.710443 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a670801-b117-4ea7-b37b-b28dd79aa1c1" containerName="keystone-cron" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.713475 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.720350 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:05:52 crc kubenswrapper[4763]: E1206 09:05:52.720774 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.728637 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fkdz7"] Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.813577 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sdlw\" (UniqueName: \"kubernetes.io/projected/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-kube-api-access-6sdlw\") pod \"redhat-operators-fkdz7\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.813649 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-catalog-content\") pod \"redhat-operators-fkdz7\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.813678 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-utilities\") pod \"redhat-operators-fkdz7\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.915316 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sdlw\" (UniqueName: \"kubernetes.io/projected/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-kube-api-access-6sdlw\") pod \"redhat-operators-fkdz7\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.915372 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-catalog-content\") pod \"redhat-operators-fkdz7\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.915398 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-utilities\") pod \"redhat-operators-fkdz7\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.915772 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-catalog-content\") pod \"redhat-operators-fkdz7\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.915851 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-utilities\") pod \"redhat-operators-fkdz7\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:05:52 crc kubenswrapper[4763]: I1206 09:05:52.944543 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sdlw\" (UniqueName: \"kubernetes.io/projected/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-kube-api-access-6sdlw\") pod \"redhat-operators-fkdz7\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:05:53 crc kubenswrapper[4763]: I1206 09:05:53.057655 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:05:53 crc kubenswrapper[4763]: I1206 09:05:53.643743 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fkdz7"] Dec 06 09:05:54 crc kubenswrapper[4763]: I1206 09:05:54.578553 4763 generic.go:334] "Generic (PLEG): container finished" podID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerID="a054297d50b55586d51137907d9e7c130c9631f94f0986f037dffc366b46fe5c" exitCode=0 Dec 06 09:05:54 crc kubenswrapper[4763]: I1206 09:05:54.578688 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkdz7" event={"ID":"65d71b37-d6fc-4cf9-8725-ce7004ec3e39","Type":"ContainerDied","Data":"a054297d50b55586d51137907d9e7c130c9631f94f0986f037dffc366b46fe5c"} Dec 06 09:05:54 crc kubenswrapper[4763]: I1206 09:05:54.579055 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkdz7" event={"ID":"65d71b37-d6fc-4cf9-8725-ce7004ec3e39","Type":"ContainerStarted","Data":"548c0e5a8d8d2f4d57933c7eb1355da9ec50bba139951564b8bf8ec7dde8be80"} Dec 06 09:05:54 crc kubenswrapper[4763]: I1206 09:05:54.581745 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 09:05:55 crc kubenswrapper[4763]: I1206 09:05:55.590005 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkdz7" event={"ID":"65d71b37-d6fc-4cf9-8725-ce7004ec3e39","Type":"ContainerStarted","Data":"8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701"} Dec 06 09:05:58 crc kubenswrapper[4763]: I1206 09:05:58.620243 4763 generic.go:334] "Generic (PLEG): container finished" podID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerID="8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701" exitCode=0 Dec 06 09:05:58 crc kubenswrapper[4763]: I1206 09:05:58.620315 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkdz7" event={"ID":"65d71b37-d6fc-4cf9-8725-ce7004ec3e39","Type":"ContainerDied","Data":"8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701"} Dec 06 09:05:59 crc kubenswrapper[4763]: I1206 09:05:59.631203 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkdz7" event={"ID":"65d71b37-d6fc-4cf9-8725-ce7004ec3e39","Type":"ContainerStarted","Data":"87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5"} Dec 06 09:05:59 crc kubenswrapper[4763]: I1206 09:05:59.649610 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fkdz7" podStartSLOduration=3.2044684 podStartE2EDuration="7.64959051s" podCreationTimestamp="2025-12-06 09:05:52 +0000 UTC" firstStartedPulling="2025-12-06 09:05:54.581486322 +0000 UTC m=+3237.157191360" lastFinishedPulling="2025-12-06 09:05:59.026608432 +0000 UTC m=+3241.602313470" observedRunningTime="2025-12-06 09:05:59.647867014 +0000 UTC m=+3242.223572062" watchObservedRunningTime="2025-12-06 09:05:59.64959051 +0000 UTC m=+3242.225295548" Dec 06 09:06:03 crc kubenswrapper[4763]: I1206 09:06:03.058659 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:06:03 crc kubenswrapper[4763]: I1206 09:06:03.059478 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:06:04 crc 
kubenswrapper[4763]: I1206 09:06:04.120881 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fkdz7" podUID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerName="registry-server" probeResult="failure" output=< Dec 06 09:06:04 crc kubenswrapper[4763]: timeout: failed to connect service ":50051" within 1s Dec 06 09:06:04 crc kubenswrapper[4763]: > Dec 06 09:06:04 crc kubenswrapper[4763]: I1206 09:06:04.720016 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:06:04 crc kubenswrapper[4763]: E1206 09:06:04.720758 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:06:13 crc kubenswrapper[4763]: I1206 09:06:13.116047 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:06:13 crc kubenswrapper[4763]: I1206 09:06:13.164146 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:06:13 crc kubenswrapper[4763]: I1206 09:06:13.361769 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fkdz7"] Dec 06 09:06:14 crc kubenswrapper[4763]: I1206 09:06:14.785804 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fkdz7" podUID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerName="registry-server" containerID="cri-o://87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5" gracePeriod=2 Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.290730 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.358548 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sdlw\" (UniqueName: \"kubernetes.io/projected/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-kube-api-access-6sdlw\") pod \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.358816 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-utilities\") pod \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.358873 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-catalog-content\") pod \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\" (UID: \"65d71b37-d6fc-4cf9-8725-ce7004ec3e39\") " Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.359402 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-utilities" (OuterVolumeSpecName: "utilities") pod "65d71b37-d6fc-4cf9-8725-ce7004ec3e39" (UID: "65d71b37-d6fc-4cf9-8725-ce7004ec3e39"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.360162 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.372238 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-kube-api-access-6sdlw" (OuterVolumeSpecName: "kube-api-access-6sdlw") pod "65d71b37-d6fc-4cf9-8725-ce7004ec3e39" (UID: "65d71b37-d6fc-4cf9-8725-ce7004ec3e39"). InnerVolumeSpecName "kube-api-access-6sdlw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.462358 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sdlw\" (UniqueName: \"kubernetes.io/projected/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-kube-api-access-6sdlw\") on node \"crc\" DevicePath \"\"" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.469165 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "65d71b37-d6fc-4cf9-8725-ce7004ec3e39" (UID: "65d71b37-d6fc-4cf9-8725-ce7004ec3e39"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.563872 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65d71b37-d6fc-4cf9-8725-ce7004ec3e39-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.795617 4763 generic.go:334] "Generic (PLEG): container finished" podID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerID="87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5" exitCode=0 Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.795657 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkdz7" event={"ID":"65d71b37-d6fc-4cf9-8725-ce7004ec3e39","Type":"ContainerDied","Data":"87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5"} Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.795677 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fkdz7" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.795695 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fkdz7" event={"ID":"65d71b37-d6fc-4cf9-8725-ce7004ec3e39","Type":"ContainerDied","Data":"548c0e5a8d8d2f4d57933c7eb1355da9ec50bba139951564b8bf8ec7dde8be80"} Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.795723 4763 scope.go:117] "RemoveContainer" containerID="87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.820766 4763 scope.go:117] "RemoveContainer" containerID="8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.824050 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fkdz7"] Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.835037 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fkdz7"] Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.848743 4763 scope.go:117] "RemoveContainer" containerID="a054297d50b55586d51137907d9e7c130c9631f94f0986f037dffc366b46fe5c" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.893414 4763 scope.go:117] "RemoveContainer" containerID="87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5" Dec 06 09:06:15 crc kubenswrapper[4763]: E1206 09:06:15.893809 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5\": container with ID starting with 87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5 not found: ID does not exist" containerID="87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.893841 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5"} err="failed to get container status \"87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5\": rpc error: code = NotFound desc = could not find container \"87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5\": container with ID starting with 87c6ca1973b07fc68863ba2d1128f6d450512e3e0fb0e8194acd2b460619cfb5 not found: ID does not exist" Dec 06 09:06:15 crc 
kubenswrapper[4763]: I1206 09:06:15.893862 4763 scope.go:117] "RemoveContainer" containerID="8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701" Dec 06 09:06:15 crc kubenswrapper[4763]: E1206 09:06:15.894133 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701\": container with ID starting with 8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701 not found: ID does not exist" containerID="8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.894159 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701"} err="failed to get container status \"8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701\": rpc error: code = NotFound desc = could not find container \"8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701\": container with ID starting with 8e435d09983ad87c1ff474976ad5faac610551e9784e447ba31ed751b772b701 not found: ID does not exist" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.894175 4763 scope.go:117] "RemoveContainer" containerID="a054297d50b55586d51137907d9e7c130c9631f94f0986f037dffc366b46fe5c" Dec 06 09:06:15 crc kubenswrapper[4763]: E1206 09:06:15.894397 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a054297d50b55586d51137907d9e7c130c9631f94f0986f037dffc366b46fe5c\": container with ID starting with a054297d50b55586d51137907d9e7c130c9631f94f0986f037dffc366b46fe5c not found: ID does not exist" containerID="a054297d50b55586d51137907d9e7c130c9631f94f0986f037dffc366b46fe5c" Dec 06 09:06:15 crc kubenswrapper[4763]: I1206 09:06:15.894430 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a054297d50b55586d51137907d9e7c130c9631f94f0986f037dffc366b46fe5c"} err="failed to get container status \"a054297d50b55586d51137907d9e7c130c9631f94f0986f037dffc366b46fe5c\": rpc error: code = NotFound desc = could not find container \"a054297d50b55586d51137907d9e7c130c9631f94f0986f037dffc366b46fe5c\": container with ID starting with a054297d50b55586d51137907d9e7c130c9631f94f0986f037dffc366b46fe5c not found: ID does not exist" Dec 06 09:06:16 crc kubenswrapper[4763]: I1206 09:06:16.719434 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:06:17 crc kubenswrapper[4763]: I1206 09:06:17.742041 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" path="/var/lib/kubelet/pods/65d71b37-d6fc-4cf9-8725-ce7004ec3e39/volumes" Dec 06 09:06:17 crc kubenswrapper[4763]: I1206 09:06:17.848349 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"eb2d7066ce0beecbf97950caae8b88bf845a14b1f83287d600cc8341e8a6544c"} Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.538830 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hqqpt"] Dec 06 09:07:01 crc kubenswrapper[4763]: E1206 09:07:01.539861 4763 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerName="registry-server" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.539880 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerName="registry-server" Dec 06 09:07:01 crc kubenswrapper[4763]: E1206 09:07:01.539932 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerName="extract-content" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.539946 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerName="extract-content" Dec 06 09:07:01 crc kubenswrapper[4763]: E1206 09:07:01.539968 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerName="extract-utilities" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.539976 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerName="extract-utilities" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.540225 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d71b37-d6fc-4cf9-8725-ce7004ec3e39" containerName="registry-server" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.543287 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.553548 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hqqpt"] Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.619688 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-utilities\") pod \"community-operators-hqqpt\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.619892 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wgqq\" (UniqueName: \"kubernetes.io/projected/eeead7cf-70bb-4c46-9341-f6c13054b66d-kube-api-access-4wgqq\") pod \"community-operators-hqqpt\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.620121 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-catalog-content\") pod \"community-operators-hqqpt\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.721752 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-catalog-content\") pod \"community-operators-hqqpt\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.721858 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-utilities\") pod 
\"community-operators-hqqpt\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.721999 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wgqq\" (UniqueName: \"kubernetes.io/projected/eeead7cf-70bb-4c46-9341-f6c13054b66d-kube-api-access-4wgqq\") pod \"community-operators-hqqpt\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.722299 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-catalog-content\") pod \"community-operators-hqqpt\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.722374 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-utilities\") pod \"community-operators-hqqpt\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.743283 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wgqq\" (UniqueName: \"kubernetes.io/projected/eeead7cf-70bb-4c46-9341-f6c13054b66d-kube-api-access-4wgqq\") pod \"community-operators-hqqpt\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:01 crc kubenswrapper[4763]: I1206 09:07:01.869268 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:02 crc kubenswrapper[4763]: I1206 09:07:02.424212 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hqqpt"] Dec 06 09:07:03 crc kubenswrapper[4763]: I1206 09:07:03.373019 4763 generic.go:334] "Generic (PLEG): container finished" podID="eeead7cf-70bb-4c46-9341-f6c13054b66d" containerID="a8cd9d5be7fde4387ca762456007b0461bba20ba449b225426160b4aa615d0b7" exitCode=0 Dec 06 09:07:03 crc kubenswrapper[4763]: I1206 09:07:03.373103 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqqpt" event={"ID":"eeead7cf-70bb-4c46-9341-f6c13054b66d","Type":"ContainerDied","Data":"a8cd9d5be7fde4387ca762456007b0461bba20ba449b225426160b4aa615d0b7"} Dec 06 09:07:03 crc kubenswrapper[4763]: I1206 09:07:03.373398 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqqpt" event={"ID":"eeead7cf-70bb-4c46-9341-f6c13054b66d","Type":"ContainerStarted","Data":"9c17a272d6dfc093d1c6d7d02f808c8c039d0ff568ef41dfee7fb15ff475a4bd"} Dec 06 09:07:04 crc kubenswrapper[4763]: I1206 09:07:04.384316 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqqpt" event={"ID":"eeead7cf-70bb-4c46-9341-f6c13054b66d","Type":"ContainerStarted","Data":"5ab4513b046f43d0ef8ef3a049dc4dafd2b7e9486b48b0912e024c8674f25073"} Dec 06 09:07:05 crc kubenswrapper[4763]: I1206 09:07:05.394555 4763 generic.go:334] "Generic (PLEG): container finished" podID="eeead7cf-70bb-4c46-9341-f6c13054b66d" containerID="5ab4513b046f43d0ef8ef3a049dc4dafd2b7e9486b48b0912e024c8674f25073" exitCode=0 Dec 06 09:07:05 crc kubenswrapper[4763]: I1206 09:07:05.394593 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqqpt" event={"ID":"eeead7cf-70bb-4c46-9341-f6c13054b66d","Type":"ContainerDied","Data":"5ab4513b046f43d0ef8ef3a049dc4dafd2b7e9486b48b0912e024c8674f25073"} Dec 06 09:07:06 crc kubenswrapper[4763]: I1206 09:07:06.405513 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqqpt" event={"ID":"eeead7cf-70bb-4c46-9341-f6c13054b66d","Type":"ContainerStarted","Data":"4d9bafde20f2bf45413bccbc60d63457196a07da35ea021ebbade76422fbef5c"} Dec 06 09:07:06 crc kubenswrapper[4763]: I1206 09:07:06.425865 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hqqpt" podStartSLOduration=3.020407451 podStartE2EDuration="5.425847303s" podCreationTimestamp="2025-12-06 09:07:01 +0000 UTC" firstStartedPulling="2025-12-06 09:07:03.376229369 +0000 UTC m=+3305.951934407" lastFinishedPulling="2025-12-06 09:07:05.781669221 +0000 UTC m=+3308.357374259" observedRunningTime="2025-12-06 09:07:06.423070948 +0000 UTC m=+3308.998776016" watchObservedRunningTime="2025-12-06 09:07:06.425847303 +0000 UTC m=+3309.001552351" Dec 06 09:07:11 crc kubenswrapper[4763]: I1206 09:07:11.869426 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:11 crc kubenswrapper[4763]: I1206 09:07:11.870716 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:11 crc kubenswrapper[4763]: I1206 09:07:11.914236 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:12 crc kubenswrapper[4763]: I1206 09:07:12.499137 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.310691 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hqqpt"] Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.311317 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hqqpt" podUID="eeead7cf-70bb-4c46-9341-f6c13054b66d" containerName="registry-server" containerID="cri-o://4d9bafde20f2bf45413bccbc60d63457196a07da35ea021ebbade76422fbef5c" gracePeriod=2 Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.483105 4763 generic.go:334] "Generic (PLEG): container finished" podID="eeead7cf-70bb-4c46-9341-f6c13054b66d" containerID="4d9bafde20f2bf45413bccbc60d63457196a07da35ea021ebbade76422fbef5c" exitCode=0 Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.483193 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqqpt" event={"ID":"eeead7cf-70bb-4c46-9341-f6c13054b66d","Type":"ContainerDied","Data":"4d9bafde20f2bf45413bccbc60d63457196a07da35ea021ebbade76422fbef5c"} Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.790478 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.822334 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-utilities\") pod \"eeead7cf-70bb-4c46-9341-f6c13054b66d\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.822483 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-catalog-content\") pod \"eeead7cf-70bb-4c46-9341-f6c13054b66d\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.822530 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wgqq\" (UniqueName: \"kubernetes.io/projected/eeead7cf-70bb-4c46-9341-f6c13054b66d-kube-api-access-4wgqq\") pod \"eeead7cf-70bb-4c46-9341-f6c13054b66d\" (UID: \"eeead7cf-70bb-4c46-9341-f6c13054b66d\") " Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.824921 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-utilities" (OuterVolumeSpecName: "utilities") pod "eeead7cf-70bb-4c46-9341-f6c13054b66d" (UID: "eeead7cf-70bb-4c46-9341-f6c13054b66d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.830455 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeead7cf-70bb-4c46-9341-f6c13054b66d-kube-api-access-4wgqq" (OuterVolumeSpecName: "kube-api-access-4wgqq") pod "eeead7cf-70bb-4c46-9341-f6c13054b66d" (UID: "eeead7cf-70bb-4c46-9341-f6c13054b66d"). InnerVolumeSpecName "kube-api-access-4wgqq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.891200 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eeead7cf-70bb-4c46-9341-f6c13054b66d" (UID: "eeead7cf-70bb-4c46-9341-f6c13054b66d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.924233 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.924276 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eeead7cf-70bb-4c46-9341-f6c13054b66d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:07:15 crc kubenswrapper[4763]: I1206 09:07:15.924289 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wgqq\" (UniqueName: \"kubernetes.io/projected/eeead7cf-70bb-4c46-9341-f6c13054b66d-kube-api-access-4wgqq\") on node \"crc\" DevicePath \"\"" Dec 06 09:07:16 crc kubenswrapper[4763]: I1206 09:07:16.495083 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqqpt" event={"ID":"eeead7cf-70bb-4c46-9341-f6c13054b66d","Type":"ContainerDied","Data":"9c17a272d6dfc093d1c6d7d02f808c8c039d0ff568ef41dfee7fb15ff475a4bd"} Dec 06 09:07:16 crc kubenswrapper[4763]: I1206 09:07:16.495167 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hqqpt" Dec 06 09:07:16 crc kubenswrapper[4763]: I1206 09:07:16.495487 4763 scope.go:117] "RemoveContainer" containerID="4d9bafde20f2bf45413bccbc60d63457196a07da35ea021ebbade76422fbef5c" Dec 06 09:07:16 crc kubenswrapper[4763]: I1206 09:07:16.514750 4763 scope.go:117] "RemoveContainer" containerID="5ab4513b046f43d0ef8ef3a049dc4dafd2b7e9486b48b0912e024c8674f25073" Dec 06 09:07:16 crc kubenswrapper[4763]: I1206 09:07:16.543269 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hqqpt"] Dec 06 09:07:16 crc kubenswrapper[4763]: I1206 09:07:16.544814 4763 scope.go:117] "RemoveContainer" containerID="a8cd9d5be7fde4387ca762456007b0461bba20ba449b225426160b4aa615d0b7" Dec 06 09:07:16 crc kubenswrapper[4763]: I1206 09:07:16.555755 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hqqpt"] Dec 06 09:07:17 crc kubenswrapper[4763]: I1206 09:07:17.739626 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eeead7cf-70bb-4c46-9341-f6c13054b66d" path="/var/lib/kubelet/pods/eeead7cf-70bb-4c46-9341-f6c13054b66d/volumes" Dec 06 09:08:42 crc kubenswrapper[4763]: I1206 09:08:42.539126 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:08:42 crc kubenswrapper[4763]: I1206 09:08:42.539803 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:09:12 crc kubenswrapper[4763]: I1206 09:09:12.538599 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:09:12 crc kubenswrapper[4763]: I1206 09:09:12.539634 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.585103 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hqq6x"] Dec 06 09:09:33 crc kubenswrapper[4763]: E1206 09:09:33.586294 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeead7cf-70bb-4c46-9341-f6c13054b66d" containerName="extract-utilities" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.586313 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeead7cf-70bb-4c46-9341-f6c13054b66d" containerName="extract-utilities" Dec 06 09:09:33 crc kubenswrapper[4763]: E1206 09:09:33.586364 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeead7cf-70bb-4c46-9341-f6c13054b66d" containerName="extract-content" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.586372 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeead7cf-70bb-4c46-9341-f6c13054b66d" containerName="extract-content" Dec 06 09:09:33 crc kubenswrapper[4763]: E1206 09:09:33.586388 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeead7cf-70bb-4c46-9341-f6c13054b66d" containerName="registry-server" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.586396 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeead7cf-70bb-4c46-9341-f6c13054b66d" containerName="registry-server" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.586643 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeead7cf-70bb-4c46-9341-f6c13054b66d" containerName="registry-server" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.588516 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.612233 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hqq6x"] Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.670421 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-catalog-content\") pod \"certified-operators-hqq6x\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.670507 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4s2b\" (UniqueName: \"kubernetes.io/projected/01027f93-b2e4-4cfa-91fb-c061aff14626-kube-api-access-s4s2b\") pod \"certified-operators-hqq6x\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.670545 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-utilities\") pod \"certified-operators-hqq6x\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.774234 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-catalog-content\") pod \"certified-operators-hqq6x\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.774310 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4s2b\" (UniqueName: \"kubernetes.io/projected/01027f93-b2e4-4cfa-91fb-c061aff14626-kube-api-access-s4s2b\") pod \"certified-operators-hqq6x\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.774347 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-utilities\") pod \"certified-operators-hqq6x\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.774872 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-utilities\") pod \"certified-operators-hqq6x\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.775355 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-catalog-content\") pod \"certified-operators-hqq6x\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.811748 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-s4s2b\" (UniqueName: \"kubernetes.io/projected/01027f93-b2e4-4cfa-91fb-c061aff14626-kube-api-access-s4s2b\") pod \"certified-operators-hqq6x\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:33 crc kubenswrapper[4763]: I1206 09:09:33.912341 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:34 crc kubenswrapper[4763]: I1206 09:09:34.447297 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hqq6x"] Dec 06 09:09:34 crc kubenswrapper[4763]: I1206 09:09:34.999117 4763 generic.go:334] "Generic (PLEG): container finished" podID="01027f93-b2e4-4cfa-91fb-c061aff14626" containerID="eb41bdd2144d50eaf4ea09186f363137a5da77db8379c4d3ca04cdbb5c26ec04" exitCode=0 Dec 06 09:09:34 crc kubenswrapper[4763]: I1206 09:09:34.999178 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqq6x" event={"ID":"01027f93-b2e4-4cfa-91fb-c061aff14626","Type":"ContainerDied","Data":"eb41bdd2144d50eaf4ea09186f363137a5da77db8379c4d3ca04cdbb5c26ec04"} Dec 06 09:09:35 crc kubenswrapper[4763]: I1206 09:09:34.999219 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqq6x" event={"ID":"01027f93-b2e4-4cfa-91fb-c061aff14626","Type":"ContainerStarted","Data":"94378a49385a8cb9305712029be8fea2a87ee2249fe4289f03b74bff54bf859a"} Dec 06 09:09:36 crc kubenswrapper[4763]: I1206 09:09:36.012288 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqq6x" event={"ID":"01027f93-b2e4-4cfa-91fb-c061aff14626","Type":"ContainerStarted","Data":"aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947"} Dec 06 09:09:37 crc kubenswrapper[4763]: I1206 09:09:37.033449 4763 generic.go:334] "Generic (PLEG): container finished" podID="01027f93-b2e4-4cfa-91fb-c061aff14626" containerID="aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947" exitCode=0 Dec 06 09:09:37 crc kubenswrapper[4763]: I1206 09:09:37.033507 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqq6x" event={"ID":"01027f93-b2e4-4cfa-91fb-c061aff14626","Type":"ContainerDied","Data":"aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947"} Dec 06 09:09:38 crc kubenswrapper[4763]: I1206 09:09:38.044348 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqq6x" event={"ID":"01027f93-b2e4-4cfa-91fb-c061aff14626","Type":"ContainerStarted","Data":"2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b"} Dec 06 09:09:38 crc kubenswrapper[4763]: I1206 09:09:38.063337 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hqq6x" podStartSLOduration=2.671763027 podStartE2EDuration="5.063315927s" podCreationTimestamp="2025-12-06 09:09:33 +0000 UTC" firstStartedPulling="2025-12-06 09:09:35.001984062 +0000 UTC m=+3457.577689110" lastFinishedPulling="2025-12-06 09:09:37.393536972 +0000 UTC m=+3459.969242010" observedRunningTime="2025-12-06 09:09:38.0630633 +0000 UTC m=+3460.638768348" watchObservedRunningTime="2025-12-06 09:09:38.063315927 +0000 UTC m=+3460.639020965" Dec 06 09:09:42 crc kubenswrapper[4763]: I1206 09:09:42.537365 4763 patch_prober.go:28] interesting 
pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:09:42 crc kubenswrapper[4763]: I1206 09:09:42.537758 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:09:42 crc kubenswrapper[4763]: I1206 09:09:42.537821 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 09:09:42 crc kubenswrapper[4763]: I1206 09:09:42.538881 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eb2d7066ce0beecbf97950caae8b88bf845a14b1f83287d600cc8341e8a6544c"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 09:09:42 crc kubenswrapper[4763]: I1206 09:09:42.538975 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://eb2d7066ce0beecbf97950caae8b88bf845a14b1f83287d600cc8341e8a6544c" gracePeriod=600 Dec 06 09:09:43 crc kubenswrapper[4763]: I1206 09:09:43.093464 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="eb2d7066ce0beecbf97950caae8b88bf845a14b1f83287d600cc8341e8a6544c" exitCode=0 Dec 06 09:09:43 crc kubenswrapper[4763]: I1206 09:09:43.093985 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"eb2d7066ce0beecbf97950caae8b88bf845a14b1f83287d600cc8341e8a6544c"} Dec 06 09:09:43 crc kubenswrapper[4763]: I1206 09:09:43.094158 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04"} Dec 06 09:09:43 crc kubenswrapper[4763]: I1206 09:09:43.094183 4763 scope.go:117] "RemoveContainer" containerID="290c1d183dad5c17380c06a0c5a043fefea18a95b747f998386a9eeaaeea85d3" Dec 06 09:09:43 crc kubenswrapper[4763]: I1206 09:09:43.913275 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:43 crc kubenswrapper[4763]: I1206 09:09:43.913653 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:43 crc kubenswrapper[4763]: I1206 09:09:43.962647 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:44 crc kubenswrapper[4763]: I1206 09:09:44.151039 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hqq6x" Dec 
06 09:09:45 crc kubenswrapper[4763]: I1206 09:09:45.358658 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hqq6x"] Dec 06 09:09:46 crc kubenswrapper[4763]: I1206 09:09:46.123514 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hqq6x" podUID="01027f93-b2e4-4cfa-91fb-c061aff14626" containerName="registry-server" containerID="cri-o://2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b" gracePeriod=2 Dec 06 09:09:46 crc kubenswrapper[4763]: I1206 09:09:46.588410 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:46 crc kubenswrapper[4763]: I1206 09:09:46.638367 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-utilities\") pod \"01027f93-b2e4-4cfa-91fb-c061aff14626\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " Dec 06 09:09:46 crc kubenswrapper[4763]: I1206 09:09:46.638405 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-catalog-content\") pod \"01027f93-b2e4-4cfa-91fb-c061aff14626\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " Dec 06 09:09:46 crc kubenswrapper[4763]: I1206 09:09:46.638524 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4s2b\" (UniqueName: \"kubernetes.io/projected/01027f93-b2e4-4cfa-91fb-c061aff14626-kube-api-access-s4s2b\") pod \"01027f93-b2e4-4cfa-91fb-c061aff14626\" (UID: \"01027f93-b2e4-4cfa-91fb-c061aff14626\") " Dec 06 09:09:46 crc kubenswrapper[4763]: I1206 09:09:46.639249 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-utilities" (OuterVolumeSpecName: "utilities") pod "01027f93-b2e4-4cfa-91fb-c061aff14626" (UID: "01027f93-b2e4-4cfa-91fb-c061aff14626"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:09:46 crc kubenswrapper[4763]: I1206 09:09:46.645736 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01027f93-b2e4-4cfa-91fb-c061aff14626-kube-api-access-s4s2b" (OuterVolumeSpecName: "kube-api-access-s4s2b") pod "01027f93-b2e4-4cfa-91fb-c061aff14626" (UID: "01027f93-b2e4-4cfa-91fb-c061aff14626"). InnerVolumeSpecName "kube-api-access-s4s2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:09:46 crc kubenswrapper[4763]: I1206 09:09:46.695299 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "01027f93-b2e4-4cfa-91fb-c061aff14626" (UID: "01027f93-b2e4-4cfa-91fb-c061aff14626"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:09:46 crc kubenswrapper[4763]: I1206 09:09:46.741266 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:09:46 crc kubenswrapper[4763]: I1206 09:09:46.741296 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01027f93-b2e4-4cfa-91fb-c061aff14626-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:09:46 crc kubenswrapper[4763]: I1206 09:09:46.741332 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4s2b\" (UniqueName: \"kubernetes.io/projected/01027f93-b2e4-4cfa-91fb-c061aff14626-kube-api-access-s4s2b\") on node \"crc\" DevicePath \"\"" Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.137135 4763 generic.go:334] "Generic (PLEG): container finished" podID="01027f93-b2e4-4cfa-91fb-c061aff14626" containerID="2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b" exitCode=0 Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.137181 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqq6x" event={"ID":"01027f93-b2e4-4cfa-91fb-c061aff14626","Type":"ContainerDied","Data":"2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b"} Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.137214 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqq6x" event={"ID":"01027f93-b2e4-4cfa-91fb-c061aff14626","Type":"ContainerDied","Data":"94378a49385a8cb9305712029be8fea2a87ee2249fe4289f03b74bff54bf859a"} Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.137237 4763 scope.go:117] "RemoveContainer" containerID="2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b" Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.137233 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hqq6x" Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.166141 4763 scope.go:117] "RemoveContainer" containerID="aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947" Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.181709 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hqq6x"] Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.193764 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hqq6x"] Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.208684 4763 scope.go:117] "RemoveContainer" containerID="eb41bdd2144d50eaf4ea09186f363137a5da77db8379c4d3ca04cdbb5c26ec04" Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.252615 4763 scope.go:117] "RemoveContainer" containerID="2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b" Dec 06 09:09:47 crc kubenswrapper[4763]: E1206 09:09:47.253088 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b\": container with ID starting with 2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b not found: ID does not exist" containerID="2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b" Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.253131 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b"} err="failed to get container status \"2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b\": rpc error: code = NotFound desc = could not find container \"2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b\": container with ID starting with 2060519cb70b6656e3b9b2879d9211d0ff83d7fa46776026c92bbb673f1cdc8b not found: ID does not exist" Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.253160 4763 scope.go:117] "RemoveContainer" containerID="aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947" Dec 06 09:09:47 crc kubenswrapper[4763]: E1206 09:09:47.253479 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947\": container with ID starting with aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947 not found: ID does not exist" containerID="aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947" Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.253513 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947"} err="failed to get container status \"aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947\": rpc error: code = NotFound desc = could not find container \"aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947\": container with ID starting with aa5145dcd2d1d87ec6702ad9b6a05fb4b551b14e58bda4831f5de9d006898947 not found: ID does not exist" Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.253530 4763 scope.go:117] "RemoveContainer" containerID="eb41bdd2144d50eaf4ea09186f363137a5da77db8379c4d3ca04cdbb5c26ec04" Dec 06 09:09:47 crc kubenswrapper[4763]: E1206 09:09:47.253730 4763 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"eb41bdd2144d50eaf4ea09186f363137a5da77db8379c4d3ca04cdbb5c26ec04\": container with ID starting with eb41bdd2144d50eaf4ea09186f363137a5da77db8379c4d3ca04cdbb5c26ec04 not found: ID does not exist" containerID="eb41bdd2144d50eaf4ea09186f363137a5da77db8379c4d3ca04cdbb5c26ec04" Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.253759 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb41bdd2144d50eaf4ea09186f363137a5da77db8379c4d3ca04cdbb5c26ec04"} err="failed to get container status \"eb41bdd2144d50eaf4ea09186f363137a5da77db8379c4d3ca04cdbb5c26ec04\": rpc error: code = NotFound desc = could not find container \"eb41bdd2144d50eaf4ea09186f363137a5da77db8379c4d3ca04cdbb5c26ec04\": container with ID starting with eb41bdd2144d50eaf4ea09186f363137a5da77db8379c4d3ca04cdbb5c26ec04 not found: ID does not exist" Dec 06 09:09:47 crc kubenswrapper[4763]: I1206 09:09:47.734097 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01027f93-b2e4-4cfa-91fb-c061aff14626" path="/var/lib/kubelet/pods/01027f93-b2e4-4cfa-91fb-c061aff14626/volumes" Dec 06 09:11:42 crc kubenswrapper[4763]: I1206 09:11:42.537740 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:11:42 crc kubenswrapper[4763]: I1206 09:11:42.538277 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.835656 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5mhcn"] Dec 06 09:12:08 crc kubenswrapper[4763]: E1206 09:12:08.837284 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01027f93-b2e4-4cfa-91fb-c061aff14626" containerName="extract-utilities" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.837317 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="01027f93-b2e4-4cfa-91fb-c061aff14626" containerName="extract-utilities" Dec 06 09:12:08 crc kubenswrapper[4763]: E1206 09:12:08.837355 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01027f93-b2e4-4cfa-91fb-c061aff14626" containerName="registry-server" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.837371 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="01027f93-b2e4-4cfa-91fb-c061aff14626" containerName="registry-server" Dec 06 09:12:08 crc kubenswrapper[4763]: E1206 09:12:08.837416 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01027f93-b2e4-4cfa-91fb-c061aff14626" containerName="extract-content" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.837433 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="01027f93-b2e4-4cfa-91fb-c061aff14626" containerName="extract-content" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.837970 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="01027f93-b2e4-4cfa-91fb-c061aff14626" containerName="registry-server" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 
09:12:08.841528 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.849456 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mhcn"] Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.887265 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-utilities\") pod \"redhat-marketplace-5mhcn\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.887314 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkrwz\" (UniqueName: \"kubernetes.io/projected/ac020674-b616-49e5-9169-ce9891d5a268-kube-api-access-bkrwz\") pod \"redhat-marketplace-5mhcn\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.887374 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-catalog-content\") pod \"redhat-marketplace-5mhcn\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.988720 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-utilities\") pod \"redhat-marketplace-5mhcn\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.988759 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkrwz\" (UniqueName: \"kubernetes.io/projected/ac020674-b616-49e5-9169-ce9891d5a268-kube-api-access-bkrwz\") pod \"redhat-marketplace-5mhcn\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.988798 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-catalog-content\") pod \"redhat-marketplace-5mhcn\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.989211 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-catalog-content\") pod \"redhat-marketplace-5mhcn\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:08 crc kubenswrapper[4763]: I1206 09:12:08.989391 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-utilities\") pod \"redhat-marketplace-5mhcn\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:09 crc kubenswrapper[4763]: I1206 
09:12:09.008649 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkrwz\" (UniqueName: \"kubernetes.io/projected/ac020674-b616-49e5-9169-ce9891d5a268-kube-api-access-bkrwz\") pod \"redhat-marketplace-5mhcn\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:09 crc kubenswrapper[4763]: I1206 09:12:09.162610 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:09 crc kubenswrapper[4763]: I1206 09:12:09.685885 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mhcn"] Dec 06 09:12:10 crc kubenswrapper[4763]: I1206 09:12:10.491684 4763 generic.go:334] "Generic (PLEG): container finished" podID="ac020674-b616-49e5-9169-ce9891d5a268" containerID="78f6a1c93e32cd7e2ebcb9c9e1d3c2a81a44bb3ca5656509439981f90ded28ef" exitCode=0 Dec 06 09:12:10 crc kubenswrapper[4763]: I1206 09:12:10.491762 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mhcn" event={"ID":"ac020674-b616-49e5-9169-ce9891d5a268","Type":"ContainerDied","Data":"78f6a1c93e32cd7e2ebcb9c9e1d3c2a81a44bb3ca5656509439981f90ded28ef"} Dec 06 09:12:10 crc kubenswrapper[4763]: I1206 09:12:10.491972 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mhcn" event={"ID":"ac020674-b616-49e5-9169-ce9891d5a268","Type":"ContainerStarted","Data":"d9be9aa1fafe9b8645d4839cab8f2438b8fac83818650e1c7a9d2949bbf7f5ff"} Dec 06 09:12:10 crc kubenswrapper[4763]: I1206 09:12:10.494143 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 09:12:12 crc kubenswrapper[4763]: I1206 09:12:12.509728 4763 generic.go:334] "Generic (PLEG): container finished" podID="ac020674-b616-49e5-9169-ce9891d5a268" containerID="71444be83af81b2d4ec1d36370bb0c42b71a844fda3214fa817d58d9fb8911ab" exitCode=0 Dec 06 09:12:12 crc kubenswrapper[4763]: I1206 09:12:12.509768 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mhcn" event={"ID":"ac020674-b616-49e5-9169-ce9891d5a268","Type":"ContainerDied","Data":"71444be83af81b2d4ec1d36370bb0c42b71a844fda3214fa817d58d9fb8911ab"} Dec 06 09:12:12 crc kubenswrapper[4763]: I1206 09:12:12.537464 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:12:12 crc kubenswrapper[4763]: I1206 09:12:12.537521 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:12:13 crc kubenswrapper[4763]: I1206 09:12:13.520709 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mhcn" event={"ID":"ac020674-b616-49e5-9169-ce9891d5a268","Type":"ContainerStarted","Data":"b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493"} Dec 06 09:12:13 crc kubenswrapper[4763]: I1206 09:12:13.541236 4763 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-marketplace/redhat-marketplace-5mhcn" podStartSLOduration=3.068660989 podStartE2EDuration="5.541215163s" podCreationTimestamp="2025-12-06 09:12:08 +0000 UTC" firstStartedPulling="2025-12-06 09:12:10.493722604 +0000 UTC m=+3613.069427652" lastFinishedPulling="2025-12-06 09:12:12.966276778 +0000 UTC m=+3615.541981826" observedRunningTime="2025-12-06 09:12:13.535475818 +0000 UTC m=+3616.111180876" watchObservedRunningTime="2025-12-06 09:12:13.541215163 +0000 UTC m=+3616.116920201" Dec 06 09:12:19 crc kubenswrapper[4763]: I1206 09:12:19.162951 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:19 crc kubenswrapper[4763]: I1206 09:12:19.163509 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:19 crc kubenswrapper[4763]: I1206 09:12:19.227378 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:19 crc kubenswrapper[4763]: I1206 09:12:19.626168 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:19 crc kubenswrapper[4763]: I1206 09:12:19.689707 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mhcn"] Dec 06 09:12:21 crc kubenswrapper[4763]: I1206 09:12:21.588801 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5mhcn" podUID="ac020674-b616-49e5-9169-ce9891d5a268" containerName="registry-server" containerID="cri-o://b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493" gracePeriod=2 Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.101790 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.266666 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-utilities\") pod \"ac020674-b616-49e5-9169-ce9891d5a268\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.266736 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkrwz\" (UniqueName: \"kubernetes.io/projected/ac020674-b616-49e5-9169-ce9891d5a268-kube-api-access-bkrwz\") pod \"ac020674-b616-49e5-9169-ce9891d5a268\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.266842 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-catalog-content\") pod \"ac020674-b616-49e5-9169-ce9891d5a268\" (UID: \"ac020674-b616-49e5-9169-ce9891d5a268\") " Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.267765 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-utilities" (OuterVolumeSpecName: "utilities") pod "ac020674-b616-49e5-9169-ce9891d5a268" (UID: "ac020674-b616-49e5-9169-ce9891d5a268"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.278196 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac020674-b616-49e5-9169-ce9891d5a268-kube-api-access-bkrwz" (OuterVolumeSpecName: "kube-api-access-bkrwz") pod "ac020674-b616-49e5-9169-ce9891d5a268" (UID: "ac020674-b616-49e5-9169-ce9891d5a268"). InnerVolumeSpecName "kube-api-access-bkrwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.290156 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac020674-b616-49e5-9169-ce9891d5a268" (UID: "ac020674-b616-49e5-9169-ce9891d5a268"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.369885 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.369933 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkrwz\" (UniqueName: \"kubernetes.io/projected/ac020674-b616-49e5-9169-ce9891d5a268-kube-api-access-bkrwz\") on node \"crc\" DevicePath \"\"" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.369944 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac020674-b616-49e5-9169-ce9891d5a268-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.601053 4763 generic.go:334] "Generic (PLEG): container finished" podID="ac020674-b616-49e5-9169-ce9891d5a268" containerID="b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493" exitCode=0 Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.601102 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mhcn" event={"ID":"ac020674-b616-49e5-9169-ce9891d5a268","Type":"ContainerDied","Data":"b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493"} Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.601157 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mhcn" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.601179 4763 scope.go:117] "RemoveContainer" containerID="b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.601166 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mhcn" event={"ID":"ac020674-b616-49e5-9169-ce9891d5a268","Type":"ContainerDied","Data":"d9be9aa1fafe9b8645d4839cab8f2438b8fac83818650e1c7a9d2949bbf7f5ff"} Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.625645 4763 scope.go:117] "RemoveContainer" containerID="71444be83af81b2d4ec1d36370bb0c42b71a844fda3214fa817d58d9fb8911ab" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.657833 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mhcn"] Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.670920 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mhcn"] Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.672596 4763 scope.go:117] "RemoveContainer" containerID="78f6a1c93e32cd7e2ebcb9c9e1d3c2a81a44bb3ca5656509439981f90ded28ef" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.704450 4763 scope.go:117] "RemoveContainer" containerID="b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493" Dec 06 09:12:22 crc kubenswrapper[4763]: E1206 09:12:22.705330 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493\": container with ID starting with b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493 not found: ID does not exist" containerID="b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.705395 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493"} err="failed to get container status \"b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493\": rpc error: code = NotFound desc = could not find container \"b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493\": container with ID starting with b0ef012e84af114913459b6e9573fb839f987b03e8fa15eb7fbddda9fcf36493 not found: ID does not exist" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.705468 4763 scope.go:117] "RemoveContainer" containerID="71444be83af81b2d4ec1d36370bb0c42b71a844fda3214fa817d58d9fb8911ab" Dec 06 09:12:22 crc kubenswrapper[4763]: E1206 09:12:22.705855 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71444be83af81b2d4ec1d36370bb0c42b71a844fda3214fa817d58d9fb8911ab\": container with ID starting with 71444be83af81b2d4ec1d36370bb0c42b71a844fda3214fa817d58d9fb8911ab not found: ID does not exist" containerID="71444be83af81b2d4ec1d36370bb0c42b71a844fda3214fa817d58d9fb8911ab" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.705890 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71444be83af81b2d4ec1d36370bb0c42b71a844fda3214fa817d58d9fb8911ab"} err="failed to get container status \"71444be83af81b2d4ec1d36370bb0c42b71a844fda3214fa817d58d9fb8911ab\": rpc error: code = NotFound desc = could not find 
container \"71444be83af81b2d4ec1d36370bb0c42b71a844fda3214fa817d58d9fb8911ab\": container with ID starting with 71444be83af81b2d4ec1d36370bb0c42b71a844fda3214fa817d58d9fb8911ab not found: ID does not exist" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.705944 4763 scope.go:117] "RemoveContainer" containerID="78f6a1c93e32cd7e2ebcb9c9e1d3c2a81a44bb3ca5656509439981f90ded28ef" Dec 06 09:12:22 crc kubenswrapper[4763]: E1206 09:12:22.706223 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78f6a1c93e32cd7e2ebcb9c9e1d3c2a81a44bb3ca5656509439981f90ded28ef\": container with ID starting with 78f6a1c93e32cd7e2ebcb9c9e1d3c2a81a44bb3ca5656509439981f90ded28ef not found: ID does not exist" containerID="78f6a1c93e32cd7e2ebcb9c9e1d3c2a81a44bb3ca5656509439981f90ded28ef" Dec 06 09:12:22 crc kubenswrapper[4763]: I1206 09:12:22.706254 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78f6a1c93e32cd7e2ebcb9c9e1d3c2a81a44bb3ca5656509439981f90ded28ef"} err="failed to get container status \"78f6a1c93e32cd7e2ebcb9c9e1d3c2a81a44bb3ca5656509439981f90ded28ef\": rpc error: code = NotFound desc = could not find container \"78f6a1c93e32cd7e2ebcb9c9e1d3c2a81a44bb3ca5656509439981f90ded28ef\": container with ID starting with 78f6a1c93e32cd7e2ebcb9c9e1d3c2a81a44bb3ca5656509439981f90ded28ef not found: ID does not exist" Dec 06 09:12:23 crc kubenswrapper[4763]: I1206 09:12:23.744264 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac020674-b616-49e5-9169-ce9891d5a268" path="/var/lib/kubelet/pods/ac020674-b616-49e5-9169-ce9891d5a268/volumes" Dec 06 09:12:42 crc kubenswrapper[4763]: I1206 09:12:42.537112 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:12:42 crc kubenswrapper[4763]: I1206 09:12:42.537665 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:12:42 crc kubenswrapper[4763]: I1206 09:12:42.537726 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 09:12:42 crc kubenswrapper[4763]: I1206 09:12:42.538449 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 09:12:42 crc kubenswrapper[4763]: I1206 09:12:42.538489 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" gracePeriod=600 Dec 06 09:12:42 crc kubenswrapper[4763]: E1206 09:12:42.669207 4763 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:12:42 crc kubenswrapper[4763]: I1206 09:12:42.786769 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" exitCode=0 Dec 06 09:12:42 crc kubenswrapper[4763]: I1206 09:12:42.786816 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04"} Dec 06 09:12:42 crc kubenswrapper[4763]: I1206 09:12:42.786862 4763 scope.go:117] "RemoveContainer" containerID="eb2d7066ce0beecbf97950caae8b88bf845a14b1f83287d600cc8341e8a6544c" Dec 06 09:12:42 crc kubenswrapper[4763]: I1206 09:12:42.787491 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:12:42 crc kubenswrapper[4763]: E1206 09:12:42.787782 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:12:57 crc kubenswrapper[4763]: I1206 09:12:57.727259 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:12:57 crc kubenswrapper[4763]: E1206 09:12:57.728181 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:13:10 crc kubenswrapper[4763]: I1206 09:13:10.720134 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:13:10 crc kubenswrapper[4763]: E1206 09:13:10.721068 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:13:22 crc kubenswrapper[4763]: I1206 09:13:22.720479 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:13:22 crc kubenswrapper[4763]: E1206 09:13:22.721065 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:13:37 crc kubenswrapper[4763]: I1206 09:13:37.726211 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:13:37 crc kubenswrapper[4763]: E1206 09:13:37.727015 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:13:49 crc kubenswrapper[4763]: I1206 09:13:49.719810 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:13:49 crc kubenswrapper[4763]: E1206 09:13:49.720632 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:14:00 crc kubenswrapper[4763]: I1206 09:14:00.720632 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:14:00 crc kubenswrapper[4763]: E1206 09:14:00.721819 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:14:11 crc kubenswrapper[4763]: I1206 09:14:11.719453 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:14:11 crc kubenswrapper[4763]: E1206 09:14:11.720407 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:14:24 crc kubenswrapper[4763]: I1206 09:14:24.724339 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:14:24 crc kubenswrapper[4763]: E1206 09:14:24.725405 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:14:39 crc kubenswrapper[4763]: I1206 09:14:39.720151 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:14:39 crc kubenswrapper[4763]: E1206 09:14:39.721122 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:14:51 crc kubenswrapper[4763]: I1206 09:14:51.720121 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:14:51 crc kubenswrapper[4763]: E1206 09:14:51.720861 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.184870 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j"] Dec 06 09:15:00 crc kubenswrapper[4763]: E1206 09:15:00.185926 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac020674-b616-49e5-9169-ce9891d5a268" containerName="extract-content" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.185943 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac020674-b616-49e5-9169-ce9891d5a268" containerName="extract-content" Dec 06 09:15:00 crc kubenswrapper[4763]: E1206 09:15:00.185956 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac020674-b616-49e5-9169-ce9891d5a268" containerName="registry-server" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.185963 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac020674-b616-49e5-9169-ce9891d5a268" containerName="registry-server" Dec 06 09:15:00 crc kubenswrapper[4763]: E1206 09:15:00.185979 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac020674-b616-49e5-9169-ce9891d5a268" containerName="extract-utilities" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.185988 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac020674-b616-49e5-9169-ce9891d5a268" containerName="extract-utilities" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.186253 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac020674-b616-49e5-9169-ce9891d5a268" containerName="registry-server" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.187274 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.189605 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.194033 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.200174 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j"] Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.312561 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-config-volume\") pod \"collect-profiles-29416875-5dt4j\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.312989 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-secret-volume\") pod \"collect-profiles-29416875-5dt4j\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.313299 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkn8p\" (UniqueName: \"kubernetes.io/projected/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-kube-api-access-bkn8p\") pod \"collect-profiles-29416875-5dt4j\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.415418 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkn8p\" (UniqueName: \"kubernetes.io/projected/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-kube-api-access-bkn8p\") pod \"collect-profiles-29416875-5dt4j\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.415483 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-config-volume\") pod \"collect-profiles-29416875-5dt4j\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.415518 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-secret-volume\") pod \"collect-profiles-29416875-5dt4j\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.416612 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-config-volume\") pod 
\"collect-profiles-29416875-5dt4j\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.421578 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-secret-volume\") pod \"collect-profiles-29416875-5dt4j\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.438726 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkn8p\" (UniqueName: \"kubernetes.io/projected/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-kube-api-access-bkn8p\") pod \"collect-profiles-29416875-5dt4j\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:00 crc kubenswrapper[4763]: I1206 09:15:00.513099 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:01 crc kubenswrapper[4763]: I1206 09:15:01.168164 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j"] Dec 06 09:15:02 crc kubenswrapper[4763]: I1206 09:15:02.097169 4763 generic.go:334] "Generic (PLEG): container finished" podID="25f9adf6-59f1-4517-8dde-bfdad7f6eb0d" containerID="1ec3abe19742c6a9cbfc1bed7138be3068ccbc47b7d1677bc799a9db95e7a9b2" exitCode=0 Dec 06 09:15:02 crc kubenswrapper[4763]: I1206 09:15:02.097221 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" event={"ID":"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d","Type":"ContainerDied","Data":"1ec3abe19742c6a9cbfc1bed7138be3068ccbc47b7d1677bc799a9db95e7a9b2"} Dec 06 09:15:02 crc kubenswrapper[4763]: I1206 09:15:02.097721 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" event={"ID":"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d","Type":"ContainerStarted","Data":"afec22d320a2f689a8d16b20cef55ad5551f3e414a8ffbb4ee3d0107193bc74b"} Dec 06 09:15:03 crc kubenswrapper[4763]: I1206 09:15:03.459004 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:03 crc kubenswrapper[4763]: I1206 09:15:03.593245 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-secret-volume\") pod \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " Dec 06 09:15:03 crc kubenswrapper[4763]: I1206 09:15:03.593408 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkn8p\" (UniqueName: \"kubernetes.io/projected/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-kube-api-access-bkn8p\") pod \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " Dec 06 09:15:03 crc kubenswrapper[4763]: I1206 09:15:03.593597 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-config-volume\") pod \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\" (UID: \"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d\") " Dec 06 09:15:03 crc kubenswrapper[4763]: I1206 09:15:03.594876 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-config-volume" (OuterVolumeSpecName: "config-volume") pod "25f9adf6-59f1-4517-8dde-bfdad7f6eb0d" (UID: "25f9adf6-59f1-4517-8dde-bfdad7f6eb0d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 09:15:03 crc kubenswrapper[4763]: I1206 09:15:03.600273 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-kube-api-access-bkn8p" (OuterVolumeSpecName: "kube-api-access-bkn8p") pod "25f9adf6-59f1-4517-8dde-bfdad7f6eb0d" (UID: "25f9adf6-59f1-4517-8dde-bfdad7f6eb0d"). InnerVolumeSpecName "kube-api-access-bkn8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:15:03 crc kubenswrapper[4763]: I1206 09:15:03.600314 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "25f9adf6-59f1-4517-8dde-bfdad7f6eb0d" (UID: "25f9adf6-59f1-4517-8dde-bfdad7f6eb0d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 09:15:03 crc kubenswrapper[4763]: I1206 09:15:03.695988 4763 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 09:15:03 crc kubenswrapper[4763]: I1206 09:15:03.696134 4763 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 06 09:15:03 crc kubenswrapper[4763]: I1206 09:15:03.696151 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkn8p\" (UniqueName: \"kubernetes.io/projected/25f9adf6-59f1-4517-8dde-bfdad7f6eb0d-kube-api-access-bkn8p\") on node \"crc\" DevicePath \"\"" Dec 06 09:15:04 crc kubenswrapper[4763]: I1206 09:15:04.115807 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" event={"ID":"25f9adf6-59f1-4517-8dde-bfdad7f6eb0d","Type":"ContainerDied","Data":"afec22d320a2f689a8d16b20cef55ad5551f3e414a8ffbb4ee3d0107193bc74b"} Dec 06 09:15:04 crc kubenswrapper[4763]: I1206 09:15:04.116177 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="afec22d320a2f689a8d16b20cef55ad5551f3e414a8ffbb4ee3d0107193bc74b" Dec 06 09:15:04 crc kubenswrapper[4763]: I1206 09:15:04.115851 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416875-5dt4j" Dec 06 09:15:04 crc kubenswrapper[4763]: I1206 09:15:04.534506 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256"] Dec 06 09:15:04 crc kubenswrapper[4763]: I1206 09:15:04.542689 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416830-sp256"] Dec 06 09:15:04 crc kubenswrapper[4763]: I1206 09:15:04.719945 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:15:04 crc kubenswrapper[4763]: E1206 09:15:04.720227 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:15:06 crc kubenswrapper[4763]: I1206 09:15:06.097438 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5701fead-fb3b-4eeb-a0a4-279b89a10ee9" path="/var/lib/kubelet/pods/5701fead-fb3b-4eeb-a0a4-279b89a10ee9/volumes" Dec 06 09:15:16 crc kubenswrapper[4763]: I1206 09:15:16.720159 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:15:16 crc kubenswrapper[4763]: E1206 09:15:16.720886 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:15:24 crc kubenswrapper[4763]: I1206 09:15:24.469894 4763 scope.go:117] "RemoveContainer" containerID="bed80c7b261a8b7fc449b3b2148f37db30f01b6b829b518f8d14445723481ede" Dec 06 09:15:30 crc kubenswrapper[4763]: I1206 09:15:30.719587 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:15:30 crc kubenswrapper[4763]: E1206 09:15:30.720383 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:15:45 crc kubenswrapper[4763]: I1206 09:15:45.719429 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:15:45 crc kubenswrapper[4763]: E1206 09:15:45.720057 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:15:58 crc kubenswrapper[4763]: I1206 09:15:58.719703 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:15:58 crc kubenswrapper[4763]: E1206 09:15:58.720667 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:16:04 crc kubenswrapper[4763]: I1206 09:16:04.965414 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l6dhc"] Dec 06 09:16:04 crc kubenswrapper[4763]: E1206 09:16:04.966792 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25f9adf6-59f1-4517-8dde-bfdad7f6eb0d" containerName="collect-profiles" Dec 06 09:16:04 crc kubenswrapper[4763]: I1206 09:16:04.966811 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="25f9adf6-59f1-4517-8dde-bfdad7f6eb0d" containerName="collect-profiles" Dec 06 09:16:04 crc kubenswrapper[4763]: I1206 09:16:04.967090 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="25f9adf6-59f1-4517-8dde-bfdad7f6eb0d" containerName="collect-profiles" Dec 06 09:16:04 crc kubenswrapper[4763]: I1206 09:16:04.968972 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:04 crc kubenswrapper[4763]: I1206 09:16:04.998992 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l6dhc"] Dec 06 09:16:05 crc kubenswrapper[4763]: I1206 09:16:05.082392 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-utilities\") pod \"redhat-operators-l6dhc\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:05 crc kubenswrapper[4763]: I1206 09:16:05.082708 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qrsg\" (UniqueName: \"kubernetes.io/projected/9e652b78-1aec-42b7-a2a9-6182340eabf0-kube-api-access-5qrsg\") pod \"redhat-operators-l6dhc\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:05 crc kubenswrapper[4763]: I1206 09:16:05.082739 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-catalog-content\") pod \"redhat-operators-l6dhc\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:05 crc kubenswrapper[4763]: I1206 09:16:05.185169 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-utilities\") pod \"redhat-operators-l6dhc\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:05 crc kubenswrapper[4763]: I1206 09:16:05.185256 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qrsg\" (UniqueName: \"kubernetes.io/projected/9e652b78-1aec-42b7-a2a9-6182340eabf0-kube-api-access-5qrsg\") pod \"redhat-operators-l6dhc\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:05 crc kubenswrapper[4763]: I1206 09:16:05.185292 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-catalog-content\") pod \"redhat-operators-l6dhc\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:05 crc kubenswrapper[4763]: I1206 09:16:05.185795 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-utilities\") pod \"redhat-operators-l6dhc\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:05 crc kubenswrapper[4763]: I1206 09:16:05.185888 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-catalog-content\") pod \"redhat-operators-l6dhc\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:05 crc kubenswrapper[4763]: I1206 09:16:05.210767 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5qrsg\" (UniqueName: \"kubernetes.io/projected/9e652b78-1aec-42b7-a2a9-6182340eabf0-kube-api-access-5qrsg\") pod \"redhat-operators-l6dhc\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:05 crc kubenswrapper[4763]: I1206 09:16:05.300003 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:05 crc kubenswrapper[4763]: I1206 09:16:05.818657 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l6dhc"] Dec 06 09:16:06 crc kubenswrapper[4763]: I1206 09:16:06.726316 4763 generic.go:334] "Generic (PLEG): container finished" podID="9e652b78-1aec-42b7-a2a9-6182340eabf0" containerID="86afe9b55d04faa5c145ce0aef1061c0e4f610b4ae497849134d0bb570d425ba" exitCode=0 Dec 06 09:16:06 crc kubenswrapper[4763]: I1206 09:16:06.726431 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6dhc" event={"ID":"9e652b78-1aec-42b7-a2a9-6182340eabf0","Type":"ContainerDied","Data":"86afe9b55d04faa5c145ce0aef1061c0e4f610b4ae497849134d0bb570d425ba"} Dec 06 09:16:06 crc kubenswrapper[4763]: I1206 09:16:06.726721 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6dhc" event={"ID":"9e652b78-1aec-42b7-a2a9-6182340eabf0","Type":"ContainerStarted","Data":"39fee979a7275f7349154c0bc8d64c7f0773a51a7f10d7c07c4375fbf411bb31"} Dec 06 09:16:07 crc kubenswrapper[4763]: I1206 09:16:07.737384 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6dhc" event={"ID":"9e652b78-1aec-42b7-a2a9-6182340eabf0","Type":"ContainerStarted","Data":"faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132"} Dec 06 09:16:09 crc kubenswrapper[4763]: I1206 09:16:09.755862 4763 generic.go:334] "Generic (PLEG): container finished" podID="9e652b78-1aec-42b7-a2a9-6182340eabf0" containerID="faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132" exitCode=0 Dec 06 09:16:09 crc kubenswrapper[4763]: I1206 09:16:09.755967 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6dhc" event={"ID":"9e652b78-1aec-42b7-a2a9-6182340eabf0","Type":"ContainerDied","Data":"faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132"} Dec 06 09:16:10 crc kubenswrapper[4763]: I1206 09:16:10.768057 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6dhc" event={"ID":"9e652b78-1aec-42b7-a2a9-6182340eabf0","Type":"ContainerStarted","Data":"66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6"} Dec 06 09:16:10 crc kubenswrapper[4763]: I1206 09:16:10.799991 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l6dhc" podStartSLOduration=3.383649008 podStartE2EDuration="6.799969995s" podCreationTimestamp="2025-12-06 09:16:04 +0000 UTC" firstStartedPulling="2025-12-06 09:16:06.731068518 +0000 UTC m=+3849.306773576" lastFinishedPulling="2025-12-06 09:16:10.147389525 +0000 UTC m=+3852.723094563" observedRunningTime="2025-12-06 09:16:10.788443893 +0000 UTC m=+3853.364148931" watchObservedRunningTime="2025-12-06 09:16:10.799969995 +0000 UTC m=+3853.375675033" Dec 06 09:16:13 crc kubenswrapper[4763]: I1206 09:16:13.719977 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 
09:16:13 crc kubenswrapper[4763]: E1206 09:16:13.720832 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:16:15 crc kubenswrapper[4763]: I1206 09:16:15.300650 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:15 crc kubenswrapper[4763]: I1206 09:16:15.300980 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:16 crc kubenswrapper[4763]: I1206 09:16:16.349160 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-l6dhc" podUID="9e652b78-1aec-42b7-a2a9-6182340eabf0" containerName="registry-server" probeResult="failure" output=< Dec 06 09:16:16 crc kubenswrapper[4763]: timeout: failed to connect service ":50051" within 1s Dec 06 09:16:16 crc kubenswrapper[4763]: > Dec 06 09:16:25 crc kubenswrapper[4763]: I1206 09:16:25.351524 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:25 crc kubenswrapper[4763]: I1206 09:16:25.402142 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:25 crc kubenswrapper[4763]: I1206 09:16:25.587692 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l6dhc"] Dec 06 09:16:26 crc kubenswrapper[4763]: I1206 09:16:26.936803 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l6dhc" podUID="9e652b78-1aec-42b7-a2a9-6182340eabf0" containerName="registry-server" containerID="cri-o://66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6" gracePeriod=2 Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.499174 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.663750 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-utilities\") pod \"9e652b78-1aec-42b7-a2a9-6182340eabf0\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.664199 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qrsg\" (UniqueName: \"kubernetes.io/projected/9e652b78-1aec-42b7-a2a9-6182340eabf0-kube-api-access-5qrsg\") pod \"9e652b78-1aec-42b7-a2a9-6182340eabf0\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.664328 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-catalog-content\") pod \"9e652b78-1aec-42b7-a2a9-6182340eabf0\" (UID: \"9e652b78-1aec-42b7-a2a9-6182340eabf0\") " Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.664807 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-utilities" (OuterVolumeSpecName: "utilities") pod "9e652b78-1aec-42b7-a2a9-6182340eabf0" (UID: "9e652b78-1aec-42b7-a2a9-6182340eabf0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.665062 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.731529 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:16:27 crc kubenswrapper[4763]: E1206 09:16:27.732243 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.772180 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9e652b78-1aec-42b7-a2a9-6182340eabf0" (UID: "9e652b78-1aec-42b7-a2a9-6182340eabf0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.869247 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e652b78-1aec-42b7-a2a9-6182340eabf0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.948888 4763 generic.go:334] "Generic (PLEG): container finished" podID="9e652b78-1aec-42b7-a2a9-6182340eabf0" containerID="66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6" exitCode=0 Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.948952 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6dhc" event={"ID":"9e652b78-1aec-42b7-a2a9-6182340eabf0","Type":"ContainerDied","Data":"66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6"} Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.948989 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6dhc" event={"ID":"9e652b78-1aec-42b7-a2a9-6182340eabf0","Type":"ContainerDied","Data":"39fee979a7275f7349154c0bc8d64c7f0773a51a7f10d7c07c4375fbf411bb31"} Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.948987 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6dhc" Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.949011 4763 scope.go:117] "RemoveContainer" containerID="66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6" Dec 06 09:16:27 crc kubenswrapper[4763]: I1206 09:16:27.974268 4763 scope.go:117] "RemoveContainer" containerID="faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132" Dec 06 09:16:28 crc kubenswrapper[4763]: I1206 09:16:28.366447 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e652b78-1aec-42b7-a2a9-6182340eabf0-kube-api-access-5qrsg" (OuterVolumeSpecName: "kube-api-access-5qrsg") pod "9e652b78-1aec-42b7-a2a9-6182340eabf0" (UID: "9e652b78-1aec-42b7-a2a9-6182340eabf0"). InnerVolumeSpecName "kube-api-access-5qrsg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:16:28 crc kubenswrapper[4763]: I1206 09:16:28.377955 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qrsg\" (UniqueName: \"kubernetes.io/projected/9e652b78-1aec-42b7-a2a9-6182340eabf0-kube-api-access-5qrsg\") on node \"crc\" DevicePath \"\"" Dec 06 09:16:28 crc kubenswrapper[4763]: I1206 09:16:28.385405 4763 scope.go:117] "RemoveContainer" containerID="86afe9b55d04faa5c145ce0aef1061c0e4f610b4ae497849134d0bb570d425ba" Dec 06 09:16:28 crc kubenswrapper[4763]: I1206 09:16:28.696598 4763 scope.go:117] "RemoveContainer" containerID="66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6" Dec 06 09:16:28 crc kubenswrapper[4763]: E1206 09:16:28.697243 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6\": container with ID starting with 66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6 not found: ID does not exist" containerID="66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6" Dec 06 09:16:28 crc kubenswrapper[4763]: I1206 09:16:28.697288 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6"} err="failed to get container status \"66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6\": rpc error: code = NotFound desc = could not find container \"66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6\": container with ID starting with 66c5940fecccbe16aac5151dd5456764f53e61d13411508992d65bc8ed6272c6 not found: ID does not exist" Dec 06 09:16:28 crc kubenswrapper[4763]: I1206 09:16:28.697315 4763 scope.go:117] "RemoveContainer" containerID="faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132" Dec 06 09:16:28 crc kubenswrapper[4763]: E1206 09:16:28.697575 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132\": container with ID starting with faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132 not found: ID does not exist" containerID="faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132" Dec 06 09:16:28 crc kubenswrapper[4763]: I1206 09:16:28.697606 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132"} err="failed to get container status \"faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132\": rpc error: code = NotFound desc = could not find container \"faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132\": container with ID starting with faae192d897f0b427d4208478c77f34ac2e8654c52d84eda5bbe11354d7cb132 not found: ID does not exist" Dec 06 09:16:28 crc kubenswrapper[4763]: I1206 09:16:28.697625 4763 scope.go:117] "RemoveContainer" containerID="86afe9b55d04faa5c145ce0aef1061c0e4f610b4ae497849134d0bb570d425ba" Dec 06 09:16:28 crc kubenswrapper[4763]: E1206 09:16:28.697841 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86afe9b55d04faa5c145ce0aef1061c0e4f610b4ae497849134d0bb570d425ba\": container with ID starting with 86afe9b55d04faa5c145ce0aef1061c0e4f610b4ae497849134d0bb570d425ba not found: ID does not 
exist" containerID="86afe9b55d04faa5c145ce0aef1061c0e4f610b4ae497849134d0bb570d425ba" Dec 06 09:16:28 crc kubenswrapper[4763]: I1206 09:16:28.697867 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86afe9b55d04faa5c145ce0aef1061c0e4f610b4ae497849134d0bb570d425ba"} err="failed to get container status \"86afe9b55d04faa5c145ce0aef1061c0e4f610b4ae497849134d0bb570d425ba\": rpc error: code = NotFound desc = could not find container \"86afe9b55d04faa5c145ce0aef1061c0e4f610b4ae497849134d0bb570d425ba\": container with ID starting with 86afe9b55d04faa5c145ce0aef1061c0e4f610b4ae497849134d0bb570d425ba not found: ID does not exist" Dec 06 09:16:28 crc kubenswrapper[4763]: I1206 09:16:28.762570 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l6dhc"] Dec 06 09:16:28 crc kubenswrapper[4763]: I1206 09:16:28.774386 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l6dhc"] Dec 06 09:16:29 crc kubenswrapper[4763]: I1206 09:16:29.733324 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e652b78-1aec-42b7-a2a9-6182340eabf0" path="/var/lib/kubelet/pods/9e652b78-1aec-42b7-a2a9-6182340eabf0/volumes" Dec 06 09:16:39 crc kubenswrapper[4763]: I1206 09:16:39.720179 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:16:39 crc kubenswrapper[4763]: E1206 09:16:39.722151 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:16:53 crc kubenswrapper[4763]: I1206 09:16:53.732109 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:16:53 crc kubenswrapper[4763]: E1206 09:16:53.733406 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.479134 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-575bv"] Dec 06 09:17:02 crc kubenswrapper[4763]: E1206 09:17:02.480225 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e652b78-1aec-42b7-a2a9-6182340eabf0" containerName="extract-content" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.480244 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e652b78-1aec-42b7-a2a9-6182340eabf0" containerName="extract-content" Dec 06 09:17:02 crc kubenswrapper[4763]: E1206 09:17:02.480284 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e652b78-1aec-42b7-a2a9-6182340eabf0" containerName="registry-server" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.480295 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e652b78-1aec-42b7-a2a9-6182340eabf0" 
containerName="registry-server" Dec 06 09:17:02 crc kubenswrapper[4763]: E1206 09:17:02.480312 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e652b78-1aec-42b7-a2a9-6182340eabf0" containerName="extract-utilities" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.480320 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e652b78-1aec-42b7-a2a9-6182340eabf0" containerName="extract-utilities" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.480583 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e652b78-1aec-42b7-a2a9-6182340eabf0" containerName="registry-server" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.482450 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.490793 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-575bv"] Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.642672 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-catalog-content\") pod \"community-operators-575bv\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.643160 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-utilities\") pod \"community-operators-575bv\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.643303 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zggmd\" (UniqueName: \"kubernetes.io/projected/d90284d4-292e-4cbd-889a-489233d044ed-kube-api-access-zggmd\") pod \"community-operators-575bv\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.744780 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zggmd\" (UniqueName: \"kubernetes.io/projected/d90284d4-292e-4cbd-889a-489233d044ed-kube-api-access-zggmd\") pod \"community-operators-575bv\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.745206 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-catalog-content\") pod \"community-operators-575bv\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.745572 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-utilities\") pod \"community-operators-575bv\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.745839 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-catalog-content\") pod \"community-operators-575bv\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.746034 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-utilities\") pod \"community-operators-575bv\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.764838 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zggmd\" (UniqueName: \"kubernetes.io/projected/d90284d4-292e-4cbd-889a-489233d044ed-kube-api-access-zggmd\") pod \"community-operators-575bv\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:02 crc kubenswrapper[4763]: I1206 09:17:02.862794 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:03 crc kubenswrapper[4763]: I1206 09:17:03.792106 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-575bv"] Dec 06 09:17:04 crc kubenswrapper[4763]: I1206 09:17:04.321798 4763 generic.go:334] "Generic (PLEG): container finished" podID="d90284d4-292e-4cbd-889a-489233d044ed" containerID="77e7c704a1a02d55fea4eb505dfc6c42b828c03a9e9140c6965cf1fe2da11249" exitCode=0 Dec 06 09:17:04 crc kubenswrapper[4763]: I1206 09:17:04.321862 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-575bv" event={"ID":"d90284d4-292e-4cbd-889a-489233d044ed","Type":"ContainerDied","Data":"77e7c704a1a02d55fea4eb505dfc6c42b828c03a9e9140c6965cf1fe2da11249"} Dec 06 09:17:04 crc kubenswrapper[4763]: I1206 09:17:04.322150 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-575bv" event={"ID":"d90284d4-292e-4cbd-889a-489233d044ed","Type":"ContainerStarted","Data":"db6f77cae190b6c75382ce1b088bb0374a9a73cc9b52b44785b71dda04b176fa"} Dec 06 09:17:04 crc kubenswrapper[4763]: I1206 09:17:04.720219 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:17:04 crc kubenswrapper[4763]: E1206 09:17:04.720583 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:17:05 crc kubenswrapper[4763]: I1206 09:17:05.337818 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-575bv" event={"ID":"d90284d4-292e-4cbd-889a-489233d044ed","Type":"ContainerStarted","Data":"9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be"} Dec 06 09:17:06 crc kubenswrapper[4763]: I1206 09:17:06.352739 4763 generic.go:334] "Generic (PLEG): container finished" podID="d90284d4-292e-4cbd-889a-489233d044ed" 
containerID="9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be" exitCode=0 Dec 06 09:17:06 crc kubenswrapper[4763]: I1206 09:17:06.352861 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-575bv" event={"ID":"d90284d4-292e-4cbd-889a-489233d044ed","Type":"ContainerDied","Data":"9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be"} Dec 06 09:17:07 crc kubenswrapper[4763]: I1206 09:17:07.364379 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-575bv" event={"ID":"d90284d4-292e-4cbd-889a-489233d044ed","Type":"ContainerStarted","Data":"1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11"} Dec 06 09:17:07 crc kubenswrapper[4763]: I1206 09:17:07.380513 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-575bv" podStartSLOduration=2.970718519 podStartE2EDuration="5.380494191s" podCreationTimestamp="2025-12-06 09:17:02 +0000 UTC" firstStartedPulling="2025-12-06 09:17:04.323682338 +0000 UTC m=+3906.899387376" lastFinishedPulling="2025-12-06 09:17:06.73345802 +0000 UTC m=+3909.309163048" observedRunningTime="2025-12-06 09:17:07.38011877 +0000 UTC m=+3909.955823808" watchObservedRunningTime="2025-12-06 09:17:07.380494191 +0000 UTC m=+3909.956199229" Dec 06 09:17:12 crc kubenswrapper[4763]: I1206 09:17:12.863881 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:12 crc kubenswrapper[4763]: I1206 09:17:12.865702 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:12 crc kubenswrapper[4763]: I1206 09:17:12.961448 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:13 crc kubenswrapper[4763]: I1206 09:17:13.473413 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:13 crc kubenswrapper[4763]: I1206 09:17:13.531387 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-575bv"] Dec 06 09:17:15 crc kubenswrapper[4763]: I1206 09:17:15.446548 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-575bv" podUID="d90284d4-292e-4cbd-889a-489233d044ed" containerName="registry-server" containerID="cri-o://1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11" gracePeriod=2 Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.038614 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.171476 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-utilities\") pod \"d90284d4-292e-4cbd-889a-489233d044ed\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.171538 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-catalog-content\") pod \"d90284d4-292e-4cbd-889a-489233d044ed\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.171572 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zggmd\" (UniqueName: \"kubernetes.io/projected/d90284d4-292e-4cbd-889a-489233d044ed-kube-api-access-zggmd\") pod \"d90284d4-292e-4cbd-889a-489233d044ed\" (UID: \"d90284d4-292e-4cbd-889a-489233d044ed\") " Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.173551 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-utilities" (OuterVolumeSpecName: "utilities") pod "d90284d4-292e-4cbd-889a-489233d044ed" (UID: "d90284d4-292e-4cbd-889a-489233d044ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.178251 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d90284d4-292e-4cbd-889a-489233d044ed-kube-api-access-zggmd" (OuterVolumeSpecName: "kube-api-access-zggmd") pod "d90284d4-292e-4cbd-889a-489233d044ed" (UID: "d90284d4-292e-4cbd-889a-489233d044ed"). InnerVolumeSpecName "kube-api-access-zggmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.232804 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d90284d4-292e-4cbd-889a-489233d044ed" (UID: "d90284d4-292e-4cbd-889a-489233d044ed"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.273522 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.273553 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d90284d4-292e-4cbd-889a-489233d044ed-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.273581 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zggmd\" (UniqueName: \"kubernetes.io/projected/d90284d4-292e-4cbd-889a-489233d044ed-kube-api-access-zggmd\") on node \"crc\" DevicePath \"\"" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.456481 4763 generic.go:334] "Generic (PLEG): container finished" podID="d90284d4-292e-4cbd-889a-489233d044ed" containerID="1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11" exitCode=0 Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.456524 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-575bv" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.456533 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-575bv" event={"ID":"d90284d4-292e-4cbd-889a-489233d044ed","Type":"ContainerDied","Data":"1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11"} Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.456603 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-575bv" event={"ID":"d90284d4-292e-4cbd-889a-489233d044ed","Type":"ContainerDied","Data":"db6f77cae190b6c75382ce1b088bb0374a9a73cc9b52b44785b71dda04b176fa"} Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.456640 4763 scope.go:117] "RemoveContainer" containerID="1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.487186 4763 scope.go:117] "RemoveContainer" containerID="9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.488596 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-575bv"] Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.502728 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-575bv"] Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.523326 4763 scope.go:117] "RemoveContainer" containerID="77e7c704a1a02d55fea4eb505dfc6c42b828c03a9e9140c6965cf1fe2da11249" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.557492 4763 scope.go:117] "RemoveContainer" containerID="1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11" Dec 06 09:17:16 crc kubenswrapper[4763]: E1206 09:17:16.557844 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11\": container with ID starting with 1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11 not found: ID does not exist" containerID="1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.557879 
4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11"} err="failed to get container status \"1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11\": rpc error: code = NotFound desc = could not find container \"1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11\": container with ID starting with 1794d94a41cafd6b42c0f8b18df9fa058edeb273c8d3453e04d42cd7bb37ee11 not found: ID does not exist" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.557919 4763 scope.go:117] "RemoveContainer" containerID="9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be" Dec 06 09:17:16 crc kubenswrapper[4763]: E1206 09:17:16.558154 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be\": container with ID starting with 9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be not found: ID does not exist" containerID="9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.558245 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be"} err="failed to get container status \"9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be\": rpc error: code = NotFound desc = could not find container \"9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be\": container with ID starting with 9504cec9aedd24006d1caa3c31f9fbbbd6dc7b238670f5c108cc3325602d02be not found: ID does not exist" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.558323 4763 scope.go:117] "RemoveContainer" containerID="77e7c704a1a02d55fea4eb505dfc6c42b828c03a9e9140c6965cf1fe2da11249" Dec 06 09:17:16 crc kubenswrapper[4763]: E1206 09:17:16.558592 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77e7c704a1a02d55fea4eb505dfc6c42b828c03a9e9140c6965cf1fe2da11249\": container with ID starting with 77e7c704a1a02d55fea4eb505dfc6c42b828c03a9e9140c6965cf1fe2da11249 not found: ID does not exist" containerID="77e7c704a1a02d55fea4eb505dfc6c42b828c03a9e9140c6965cf1fe2da11249" Dec 06 09:17:16 crc kubenswrapper[4763]: I1206 09:17:16.558620 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77e7c704a1a02d55fea4eb505dfc6c42b828c03a9e9140c6965cf1fe2da11249"} err="failed to get container status \"77e7c704a1a02d55fea4eb505dfc6c42b828c03a9e9140c6965cf1fe2da11249\": rpc error: code = NotFound desc = could not find container \"77e7c704a1a02d55fea4eb505dfc6c42b828c03a9e9140c6965cf1fe2da11249\": container with ID starting with 77e7c704a1a02d55fea4eb505dfc6c42b828c03a9e9140c6965cf1fe2da11249 not found: ID does not exist" Dec 06 09:17:17 crc kubenswrapper[4763]: I1206 09:17:17.742334 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d90284d4-292e-4cbd-889a-489233d044ed" path="/var/lib/kubelet/pods/d90284d4-292e-4cbd-889a-489233d044ed/volumes" Dec 06 09:17:19 crc kubenswrapper[4763]: I1206 09:17:19.724134 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:17:19 crc kubenswrapper[4763]: E1206 09:17:19.725548 4763 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:17:31 crc kubenswrapper[4763]: I1206 09:17:31.720397 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:17:31 crc kubenswrapper[4763]: E1206 09:17:31.721140 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:17:44 crc kubenswrapper[4763]: I1206 09:17:44.719676 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:17:45 crc kubenswrapper[4763]: I1206 09:17:45.717000 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"28437ee2316debbdb76f6f0ec1434827b0aa2f3cb5b8b4e65763e2d54bc56306"} Dec 06 09:19:19 crc kubenswrapper[4763]: E1206 09:19:19.869694 4763 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.18:56704->38.102.83.18:37247: write tcp 38.102.83.18:56704->38.102.83.18:37247: write: broken pipe Dec 06 09:19:26 crc kubenswrapper[4763]: E1206 09:19:26.497248 4763 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.18:56858->38.102.83.18:37247: write tcp 38.102.83.18:56858->38.102.83.18:37247: write: broken pipe Dec 06 09:20:12 crc kubenswrapper[4763]: I1206 09:20:12.537576 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:20:12 crc kubenswrapper[4763]: I1206 09:20:12.538191 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.529711 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-76cvx"] Dec 06 09:20:24 crc kubenswrapper[4763]: E1206 09:20:24.530791 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d90284d4-292e-4cbd-889a-489233d044ed" containerName="extract-utilities" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.530806 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d90284d4-292e-4cbd-889a-489233d044ed" containerName="extract-utilities" Dec 06 09:20:24 crc kubenswrapper[4763]: E1206 09:20:24.530837 4763 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d90284d4-292e-4cbd-889a-489233d044ed" containerName="extract-content" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.530845 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d90284d4-292e-4cbd-889a-489233d044ed" containerName="extract-content" Dec 06 09:20:24 crc kubenswrapper[4763]: E1206 09:20:24.530860 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d90284d4-292e-4cbd-889a-489233d044ed" containerName="registry-server" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.530868 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="d90284d4-292e-4cbd-889a-489233d044ed" containerName="registry-server" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.531134 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="d90284d4-292e-4cbd-889a-489233d044ed" containerName="registry-server" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.532862 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.561460 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-76cvx"] Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.601861 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-utilities\") pod \"certified-operators-76cvx\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.602290 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-catalog-content\") pod \"certified-operators-76cvx\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.602447 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g54w\" (UniqueName: \"kubernetes.io/projected/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-kube-api-access-2g54w\") pod \"certified-operators-76cvx\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.703888 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-catalog-content\") pod \"certified-operators-76cvx\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.704194 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g54w\" (UniqueName: \"kubernetes.io/projected/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-kube-api-access-2g54w\") pod \"certified-operators-76cvx\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.704304 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-utilities\") pod 
\"certified-operators-76cvx\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.704509 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-catalog-content\") pod \"certified-operators-76cvx\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.704617 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-utilities\") pod \"certified-operators-76cvx\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.764773 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g54w\" (UniqueName: \"kubernetes.io/projected/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-kube-api-access-2g54w\") pod \"certified-operators-76cvx\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:24 crc kubenswrapper[4763]: I1206 09:20:24.863059 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:25 crc kubenswrapper[4763]: I1206 09:20:25.411053 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-76cvx"] Dec 06 09:20:26 crc kubenswrapper[4763]: I1206 09:20:26.215623 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76cvx" event={"ID":"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4","Type":"ContainerDied","Data":"d7933c6bdd2470d432d908676bd8b1dc60f3ffa2cf21b8659de6d9662cc2abd9"} Dec 06 09:20:26 crc kubenswrapper[4763]: I1206 09:20:26.214700 4763 generic.go:334] "Generic (PLEG): container finished" podID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" containerID="d7933c6bdd2470d432d908676bd8b1dc60f3ffa2cf21b8659de6d9662cc2abd9" exitCode=0 Dec 06 09:20:26 crc kubenswrapper[4763]: I1206 09:20:26.216787 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76cvx" event={"ID":"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4","Type":"ContainerStarted","Data":"2ae6bc6b6ebab366e61d30283f6318fda77e023bf2ff95689942abe2d8e3566b"} Dec 06 09:20:26 crc kubenswrapper[4763]: I1206 09:20:26.217933 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 09:20:32 crc kubenswrapper[4763]: I1206 09:20:32.283861 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76cvx" event={"ID":"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4","Type":"ContainerStarted","Data":"8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999"} Dec 06 09:20:33 crc kubenswrapper[4763]: I1206 09:20:33.294621 4763 generic.go:334] "Generic (PLEG): container finished" podID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" containerID="8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999" exitCode=0 Dec 06 09:20:33 crc kubenswrapper[4763]: I1206 09:20:33.294661 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76cvx" 
event={"ID":"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4","Type":"ContainerDied","Data":"8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999"} Dec 06 09:20:34 crc kubenswrapper[4763]: I1206 09:20:34.307605 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76cvx" event={"ID":"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4","Type":"ContainerStarted","Data":"fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb"} Dec 06 09:20:34 crc kubenswrapper[4763]: I1206 09:20:34.329092 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-76cvx" podStartSLOduration=2.877898277 podStartE2EDuration="10.329076376s" podCreationTimestamp="2025-12-06 09:20:24 +0000 UTC" firstStartedPulling="2025-12-06 09:20:26.217641558 +0000 UTC m=+4108.793346596" lastFinishedPulling="2025-12-06 09:20:33.668819647 +0000 UTC m=+4116.244524695" observedRunningTime="2025-12-06 09:20:34.32882873 +0000 UTC m=+4116.904533768" watchObservedRunningTime="2025-12-06 09:20:34.329076376 +0000 UTC m=+4116.904781414" Dec 06 09:20:34 crc kubenswrapper[4763]: I1206 09:20:34.863476 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:34 crc kubenswrapper[4763]: I1206 09:20:34.863572 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:35 crc kubenswrapper[4763]: I1206 09:20:35.906140 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-76cvx" podUID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" containerName="registry-server" probeResult="failure" output=< Dec 06 09:20:35 crc kubenswrapper[4763]: timeout: failed to connect service ":50051" within 1s Dec 06 09:20:35 crc kubenswrapper[4763]: > Dec 06 09:20:42 crc kubenswrapper[4763]: I1206 09:20:42.537103 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:20:42 crc kubenswrapper[4763]: I1206 09:20:42.540139 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:20:44 crc kubenswrapper[4763]: I1206 09:20:44.948842 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:45 crc kubenswrapper[4763]: I1206 09:20:45.055474 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:45 crc kubenswrapper[4763]: I1206 09:20:45.205670 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-76cvx"] Dec 06 09:20:46 crc kubenswrapper[4763]: I1206 09:20:46.409589 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-76cvx" podUID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" containerName="registry-server" 
containerID="cri-o://fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb" gracePeriod=2 Dec 06 09:20:46 crc kubenswrapper[4763]: I1206 09:20:46.881110 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:46 crc kubenswrapper[4763]: I1206 09:20:46.993279 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2g54w\" (UniqueName: \"kubernetes.io/projected/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-kube-api-access-2g54w\") pod \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " Dec 06 09:20:46 crc kubenswrapper[4763]: I1206 09:20:46.993377 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-catalog-content\") pod \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " Dec 06 09:20:46 crc kubenswrapper[4763]: I1206 09:20:46.993426 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-utilities\") pod \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\" (UID: \"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4\") " Dec 06 09:20:46 crc kubenswrapper[4763]: I1206 09:20:46.994378 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-utilities" (OuterVolumeSpecName: "utilities") pod "7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" (UID: "7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:20:46 crc kubenswrapper[4763]: I1206 09:20:46.999655 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-kube-api-access-2g54w" (OuterVolumeSpecName: "kube-api-access-2g54w") pod "7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" (UID: "7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4"). InnerVolumeSpecName "kube-api-access-2g54w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.041098 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" (UID: "7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.095708 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2g54w\" (UniqueName: \"kubernetes.io/projected/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-kube-api-access-2g54w\") on node \"crc\" DevicePath \"\"" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.095743 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.095755 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.419326 4763 generic.go:334] "Generic (PLEG): container finished" podID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" containerID="fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb" exitCode=0 Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.419409 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-76cvx" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.419408 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76cvx" event={"ID":"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4","Type":"ContainerDied","Data":"fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb"} Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.419704 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-76cvx" event={"ID":"7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4","Type":"ContainerDied","Data":"2ae6bc6b6ebab366e61d30283f6318fda77e023bf2ff95689942abe2d8e3566b"} Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.419725 4763 scope.go:117] "RemoveContainer" containerID="fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.442558 4763 scope.go:117] "RemoveContainer" containerID="8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.456150 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-76cvx"] Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.465924 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-76cvx"] Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.491291 4763 scope.go:117] "RemoveContainer" containerID="d7933c6bdd2470d432d908676bd8b1dc60f3ffa2cf21b8659de6d9662cc2abd9" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.520340 4763 scope.go:117] "RemoveContainer" containerID="fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb" Dec 06 09:20:47 crc kubenswrapper[4763]: E1206 09:20:47.522242 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb\": container with ID starting with fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb not found: ID does not exist" containerID="fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.522299 
4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb"} err="failed to get container status \"fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb\": rpc error: code = NotFound desc = could not find container \"fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb\": container with ID starting with fd31390449e8d8fa76a398c4c7f6a5d3f93f856d949fdf2e35856e321c977ccb not found: ID does not exist" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.522332 4763 scope.go:117] "RemoveContainer" containerID="8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999" Dec 06 09:20:47 crc kubenswrapper[4763]: E1206 09:20:47.522783 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999\": container with ID starting with 8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999 not found: ID does not exist" containerID="8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.522833 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999"} err="failed to get container status \"8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999\": rpc error: code = NotFound desc = could not find container \"8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999\": container with ID starting with 8a1ee5b8e5f6e374af83507d631521be2ee218fe9107363018253e2ddb32e999 not found: ID does not exist" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.522864 4763 scope.go:117] "RemoveContainer" containerID="d7933c6bdd2470d432d908676bd8b1dc60f3ffa2cf21b8659de6d9662cc2abd9" Dec 06 09:20:47 crc kubenswrapper[4763]: E1206 09:20:47.523170 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7933c6bdd2470d432d908676bd8b1dc60f3ffa2cf21b8659de6d9662cc2abd9\": container with ID starting with d7933c6bdd2470d432d908676bd8b1dc60f3ffa2cf21b8659de6d9662cc2abd9 not found: ID does not exist" containerID="d7933c6bdd2470d432d908676bd8b1dc60f3ffa2cf21b8659de6d9662cc2abd9" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.523271 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7933c6bdd2470d432d908676bd8b1dc60f3ffa2cf21b8659de6d9662cc2abd9"} err="failed to get container status \"d7933c6bdd2470d432d908676bd8b1dc60f3ffa2cf21b8659de6d9662cc2abd9\": rpc error: code = NotFound desc = could not find container \"d7933c6bdd2470d432d908676bd8b1dc60f3ffa2cf21b8659de6d9662cc2abd9\": container with ID starting with d7933c6bdd2470d432d908676bd8b1dc60f3ffa2cf21b8659de6d9662cc2abd9 not found: ID does not exist" Dec 06 09:20:47 crc kubenswrapper[4763]: I1206 09:20:47.732099 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" path="/var/lib/kubelet/pods/7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4/volumes" Dec 06 09:21:12 crc kubenswrapper[4763]: I1206 09:21:12.537080 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:21:12 crc kubenswrapper[4763]: I1206 09:21:12.537641 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:21:12 crc kubenswrapper[4763]: I1206 09:21:12.537686 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 09:21:12 crc kubenswrapper[4763]: I1206 09:21:12.538556 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"28437ee2316debbdb76f6f0ec1434827b0aa2f3cb5b8b4e65763e2d54bc56306"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 09:21:12 crc kubenswrapper[4763]: I1206 09:21:12.538609 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://28437ee2316debbdb76f6f0ec1434827b0aa2f3cb5b8b4e65763e2d54bc56306" gracePeriod=600 Dec 06 09:21:13 crc kubenswrapper[4763]: I1206 09:21:13.523826 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="28437ee2316debbdb76f6f0ec1434827b0aa2f3cb5b8b4e65763e2d54bc56306" exitCode=0 Dec 06 09:21:13 crc kubenswrapper[4763]: I1206 09:21:13.523857 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"28437ee2316debbdb76f6f0ec1434827b0aa2f3cb5b8b4e65763e2d54bc56306"} Dec 06 09:21:13 crc kubenswrapper[4763]: I1206 09:21:13.524387 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd"} Dec 06 09:21:13 crc kubenswrapper[4763]: I1206 09:21:13.524407 4763 scope.go:117] "RemoveContainer" containerID="6463d53fbf249295b832e9b6e5c9b2e5f6bf0c267afc05d9ec75a5dac620fb04" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.701046 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gk24j"] Dec 06 09:23:01 crc kubenswrapper[4763]: E1206 09:23:01.701986 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" containerName="registry-server" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.702006 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" containerName="registry-server" Dec 06 09:23:01 crc kubenswrapper[4763]: E1206 09:23:01.702030 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" containerName="extract-content" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.702037 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" 
containerName="extract-content" Dec 06 09:23:01 crc kubenswrapper[4763]: E1206 09:23:01.702066 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" containerName="extract-utilities" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.702073 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" containerName="extract-utilities" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.702298 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="7764e84a-e4a9-4a3f-ac06-4d4bbd5708f4" containerName="registry-server" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.703753 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.712944 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk24j"] Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.800397 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-catalog-content\") pod \"redhat-marketplace-gk24j\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.800539 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgjsk\" (UniqueName: \"kubernetes.io/projected/8341298e-3a49-4048-adc9-87b6986f55bc-kube-api-access-hgjsk\") pod \"redhat-marketplace-gk24j\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.800727 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-utilities\") pod \"redhat-marketplace-gk24j\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.902816 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-utilities\") pod \"redhat-marketplace-gk24j\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.902997 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-catalog-content\") pod \"redhat-marketplace-gk24j\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.903053 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgjsk\" (UniqueName: \"kubernetes.io/projected/8341298e-3a49-4048-adc9-87b6986f55bc-kube-api-access-hgjsk\") pod \"redhat-marketplace-gk24j\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.903520 4763 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-utilities\") pod \"redhat-marketplace-gk24j\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.903526 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-catalog-content\") pod \"redhat-marketplace-gk24j\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:01 crc kubenswrapper[4763]: I1206 09:23:01.932485 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgjsk\" (UniqueName: \"kubernetes.io/projected/8341298e-3a49-4048-adc9-87b6986f55bc-kube-api-access-hgjsk\") pod \"redhat-marketplace-gk24j\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:02 crc kubenswrapper[4763]: I1206 09:23:02.055608 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:02 crc kubenswrapper[4763]: I1206 09:23:02.597012 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk24j"] Dec 06 09:23:02 crc kubenswrapper[4763]: I1206 09:23:02.622364 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk24j" event={"ID":"8341298e-3a49-4048-adc9-87b6986f55bc","Type":"ContainerStarted","Data":"e7aaabf9a578ec3e2167093b4cfbe0b2d0b54db2f5a9d0308c17d56e204b9a45"} Dec 06 09:23:03 crc kubenswrapper[4763]: I1206 09:23:03.634441 4763 generic.go:334] "Generic (PLEG): container finished" podID="8341298e-3a49-4048-adc9-87b6986f55bc" containerID="0642f0696b7d6b5e3bfee1b4f49192a4964b0e9b2442b41533ea2f51f6d0d61c" exitCode=0 Dec 06 09:23:03 crc kubenswrapper[4763]: I1206 09:23:03.634519 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk24j" event={"ID":"8341298e-3a49-4048-adc9-87b6986f55bc","Type":"ContainerDied","Data":"0642f0696b7d6b5e3bfee1b4f49192a4964b0e9b2442b41533ea2f51f6d0d61c"} Dec 06 09:23:04 crc kubenswrapper[4763]: I1206 09:23:04.660247 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk24j" event={"ID":"8341298e-3a49-4048-adc9-87b6986f55bc","Type":"ContainerStarted","Data":"49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e"} Dec 06 09:23:05 crc kubenswrapper[4763]: I1206 09:23:05.670729 4763 generic.go:334] "Generic (PLEG): container finished" podID="8341298e-3a49-4048-adc9-87b6986f55bc" containerID="49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e" exitCode=0 Dec 06 09:23:05 crc kubenswrapper[4763]: I1206 09:23:05.671055 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk24j" event={"ID":"8341298e-3a49-4048-adc9-87b6986f55bc","Type":"ContainerDied","Data":"49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e"} Dec 06 09:23:06 crc kubenswrapper[4763]: I1206 09:23:06.681247 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk24j" event={"ID":"8341298e-3a49-4048-adc9-87b6986f55bc","Type":"ContainerStarted","Data":"490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899"} Dec 06 09:23:06 crc 
kubenswrapper[4763]: I1206 09:23:06.697483 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gk24j" podStartSLOduration=3.263801654 podStartE2EDuration="5.697442235s" podCreationTimestamp="2025-12-06 09:23:01 +0000 UTC" firstStartedPulling="2025-12-06 09:23:03.638117973 +0000 UTC m=+4266.213823011" lastFinishedPulling="2025-12-06 09:23:06.071758554 +0000 UTC m=+4268.647463592" observedRunningTime="2025-12-06 09:23:06.694881135 +0000 UTC m=+4269.270586183" watchObservedRunningTime="2025-12-06 09:23:06.697442235 +0000 UTC m=+4269.273147273" Dec 06 09:23:10 crc kubenswrapper[4763]: E1206 09:23:10.903408 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]" Dec 06 09:23:12 crc kubenswrapper[4763]: I1206 09:23:12.056166 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:12 crc kubenswrapper[4763]: I1206 09:23:12.056567 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:12 crc kubenswrapper[4763]: I1206 09:23:12.120030 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:12 crc kubenswrapper[4763]: I1206 09:23:12.537327 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:23:12 crc kubenswrapper[4763]: I1206 09:23:12.537387 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:23:12 crc kubenswrapper[4763]: I1206 09:23:12.774242 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:12 crc kubenswrapper[4763]: I1206 09:23:12.819887 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk24j"] Dec 06 09:23:14 crc kubenswrapper[4763]: I1206 09:23:14.750175 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gk24j" podUID="8341298e-3a49-4048-adc9-87b6986f55bc" containerName="registry-server" containerID="cri-o://490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899" gracePeriod=2 Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.233764 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.396256 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-catalog-content\") pod \"8341298e-3a49-4048-adc9-87b6986f55bc\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.396541 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgjsk\" (UniqueName: \"kubernetes.io/projected/8341298e-3a49-4048-adc9-87b6986f55bc-kube-api-access-hgjsk\") pod \"8341298e-3a49-4048-adc9-87b6986f55bc\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.396936 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-utilities\") pod \"8341298e-3a49-4048-adc9-87b6986f55bc\" (UID: \"8341298e-3a49-4048-adc9-87b6986f55bc\") " Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.398394 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-utilities" (OuterVolumeSpecName: "utilities") pod "8341298e-3a49-4048-adc9-87b6986f55bc" (UID: "8341298e-3a49-4048-adc9-87b6986f55bc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.403482 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8341298e-3a49-4048-adc9-87b6986f55bc-kube-api-access-hgjsk" (OuterVolumeSpecName: "kube-api-access-hgjsk") pod "8341298e-3a49-4048-adc9-87b6986f55bc" (UID: "8341298e-3a49-4048-adc9-87b6986f55bc"). InnerVolumeSpecName "kube-api-access-hgjsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.418922 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8341298e-3a49-4048-adc9-87b6986f55bc" (UID: "8341298e-3a49-4048-adc9-87b6986f55bc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.499585 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgjsk\" (UniqueName: \"kubernetes.io/projected/8341298e-3a49-4048-adc9-87b6986f55bc-kube-api-access-hgjsk\") on node \"crc\" DevicePath \"\"" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.499621 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.499631 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8341298e-3a49-4048-adc9-87b6986f55bc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.761868 4763 generic.go:334] "Generic (PLEG): container finished" podID="8341298e-3a49-4048-adc9-87b6986f55bc" containerID="490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899" exitCode=0 Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.761949 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk24j" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.761944 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk24j" event={"ID":"8341298e-3a49-4048-adc9-87b6986f55bc","Type":"ContainerDied","Data":"490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899"} Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.763040 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk24j" event={"ID":"8341298e-3a49-4048-adc9-87b6986f55bc","Type":"ContainerDied","Data":"e7aaabf9a578ec3e2167093b4cfbe0b2d0b54db2f5a9d0308c17d56e204b9a45"} Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.763064 4763 scope.go:117] "RemoveContainer" containerID="490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.792167 4763 scope.go:117] "RemoveContainer" containerID="49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.798405 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk24j"] Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.811862 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk24j"] Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.816277 4763 scope.go:117] "RemoveContainer" containerID="0642f0696b7d6b5e3bfee1b4f49192a4964b0e9b2442b41533ea2f51f6d0d61c" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.870465 4763 scope.go:117] "RemoveContainer" containerID="490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899" Dec 06 09:23:15 crc kubenswrapper[4763]: E1206 09:23:15.870870 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899\": container with ID starting with 490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899 not found: ID does not exist" containerID="490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.870936 4763 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899"} err="failed to get container status \"490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899\": rpc error: code = NotFound desc = could not find container \"490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899\": container with ID starting with 490bee134fa794ccd2ca80f8da75aec2693973745e04f9ccb99b80e929139899 not found: ID does not exist" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.870962 4763 scope.go:117] "RemoveContainer" containerID="49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e" Dec 06 09:23:15 crc kubenswrapper[4763]: E1206 09:23:15.871206 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e\": container with ID starting with 49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e not found: ID does not exist" containerID="49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.871263 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e"} err="failed to get container status \"49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e\": rpc error: code = NotFound desc = could not find container \"49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e\": container with ID starting with 49b4f7840dd452a46e6c49e43c9dd8d9d9473b8da660dc1ceb2b7b9d82666b4e not found: ID does not exist" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.871284 4763 scope.go:117] "RemoveContainer" containerID="0642f0696b7d6b5e3bfee1b4f49192a4964b0e9b2442b41533ea2f51f6d0d61c" Dec 06 09:23:15 crc kubenswrapper[4763]: E1206 09:23:15.871497 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0642f0696b7d6b5e3bfee1b4f49192a4964b0e9b2442b41533ea2f51f6d0d61c\": container with ID starting with 0642f0696b7d6b5e3bfee1b4f49192a4964b0e9b2442b41533ea2f51f6d0d61c not found: ID does not exist" containerID="0642f0696b7d6b5e3bfee1b4f49192a4964b0e9b2442b41533ea2f51f6d0d61c" Dec 06 09:23:15 crc kubenswrapper[4763]: I1206 09:23:15.871537 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0642f0696b7d6b5e3bfee1b4f49192a4964b0e9b2442b41533ea2f51f6d0d61c"} err="failed to get container status \"0642f0696b7d6b5e3bfee1b4f49192a4964b0e9b2442b41533ea2f51f6d0d61c\": rpc error: code = NotFound desc = could not find container \"0642f0696b7d6b5e3bfee1b4f49192a4964b0e9b2442b41533ea2f51f6d0d61c\": container with ID starting with 0642f0696b7d6b5e3bfee1b4f49192a4964b0e9b2442b41533ea2f51f6d0d61c not found: ID does not exist" Dec 06 09:23:17 crc kubenswrapper[4763]: I1206 09:23:17.730018 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8341298e-3a49-4048-adc9-87b6986f55bc" path="/var/lib/kubelet/pods/8341298e-3a49-4048-adc9-87b6986f55bc/volumes" Dec 06 09:23:42 crc kubenswrapper[4763]: I1206 09:23:42.536500 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:23:42 crc kubenswrapper[4763]: I1206 09:23:42.537074 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:24:12 crc kubenswrapper[4763]: I1206 09:24:12.541306 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:24:12 crc kubenswrapper[4763]: I1206 09:24:12.541795 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:24:12 crc kubenswrapper[4763]: I1206 09:24:12.541843 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 09:24:12 crc kubenswrapper[4763]: I1206 09:24:12.542645 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 09:24:12 crc kubenswrapper[4763]: I1206 09:24:12.542713 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" gracePeriod=600 Dec 06 09:24:12 crc kubenswrapper[4763]: E1206 09:24:12.673424 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:24:13 crc kubenswrapper[4763]: I1206 09:24:13.330396 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" exitCode=0 Dec 06 09:24:13 crc kubenswrapper[4763]: I1206 09:24:13.330450 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd"} Dec 06 09:24:13 crc kubenswrapper[4763]: I1206 09:24:13.330640 4763 scope.go:117] "RemoveContainer" containerID="28437ee2316debbdb76f6f0ec1434827b0aa2f3cb5b8b4e65763e2d54bc56306" Dec 06 09:24:13 crc kubenswrapper[4763]: I1206 
09:24:13.331214 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:24:13 crc kubenswrapper[4763]: E1206 09:24:13.331438 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:24:24 crc kubenswrapper[4763]: I1206 09:24:24.719355 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:24:24 crc kubenswrapper[4763]: E1206 09:24:24.720370 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:24:37 crc kubenswrapper[4763]: I1206 09:24:37.746421 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:24:37 crc kubenswrapper[4763]: E1206 09:24:37.748005 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:24:51 crc kubenswrapper[4763]: I1206 09:24:51.720189 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:24:51 crc kubenswrapper[4763]: E1206 09:24:51.721025 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:25:05 crc kubenswrapper[4763]: I1206 09:25:05.736299 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:25:05 crc kubenswrapper[4763]: E1206 09:25:05.740513 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:25:16 crc kubenswrapper[4763]: I1206 09:25:16.720186 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:25:16 crc kubenswrapper[4763]: E1206 09:25:16.721090 
4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:25:29 crc kubenswrapper[4763]: I1206 09:25:29.720730 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:25:29 crc kubenswrapper[4763]: E1206 09:25:29.721889 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:25:44 crc kubenswrapper[4763]: I1206 09:25:44.719648 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:25:44 crc kubenswrapper[4763]: E1206 09:25:44.721766 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:25:56 crc kubenswrapper[4763]: I1206 09:25:56.719965 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:25:56 crc kubenswrapper[4763]: E1206 09:25:56.720745 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:26:07 crc kubenswrapper[4763]: I1206 09:26:07.728452 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:26:07 crc kubenswrapper[4763]: E1206 09:26:07.729172 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:26:19 crc kubenswrapper[4763]: I1206 09:26:19.719609 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:26:19 crc kubenswrapper[4763]: E1206 09:26:19.720356 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.600810 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vl8q9"] Dec 06 09:26:27 crc kubenswrapper[4763]: E1206 09:26:27.601981 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8341298e-3a49-4048-adc9-87b6986f55bc" containerName="extract-content" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.601995 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="8341298e-3a49-4048-adc9-87b6986f55bc" containerName="extract-content" Dec 06 09:26:27 crc kubenswrapper[4763]: E1206 09:26:27.602012 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8341298e-3a49-4048-adc9-87b6986f55bc" containerName="extract-utilities" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.602020 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="8341298e-3a49-4048-adc9-87b6986f55bc" containerName="extract-utilities" Dec 06 09:26:27 crc kubenswrapper[4763]: E1206 09:26:27.602066 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8341298e-3a49-4048-adc9-87b6986f55bc" containerName="registry-server" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.602072 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="8341298e-3a49-4048-adc9-87b6986f55bc" containerName="registry-server" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.602299 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="8341298e-3a49-4048-adc9-87b6986f55bc" containerName="registry-server" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.603987 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.625215 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vl8q9"] Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.713496 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-catalog-content\") pod \"redhat-operators-vl8q9\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.713575 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jh8kk\" (UniqueName: \"kubernetes.io/projected/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-kube-api-access-jh8kk\") pod \"redhat-operators-vl8q9\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.713674 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-utilities\") pod \"redhat-operators-vl8q9\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.816235 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-catalog-content\") pod \"redhat-operators-vl8q9\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.816299 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jh8kk\" (UniqueName: \"kubernetes.io/projected/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-kube-api-access-jh8kk\") pod \"redhat-operators-vl8q9\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.816412 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-utilities\") pod \"redhat-operators-vl8q9\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.817078 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-catalog-content\") pod \"redhat-operators-vl8q9\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.817537 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-utilities\") pod \"redhat-operators-vl8q9\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.841155 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jh8kk\" (UniqueName: \"kubernetes.io/projected/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-kube-api-access-jh8kk\") pod \"redhat-operators-vl8q9\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:27 crc kubenswrapper[4763]: I1206 09:26:27.968477 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:28 crc kubenswrapper[4763]: I1206 09:26:28.473918 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vl8q9"] Dec 06 09:26:28 crc kubenswrapper[4763]: I1206 09:26:28.651710 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl8q9" event={"ID":"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8","Type":"ContainerStarted","Data":"dd6188626bb37dfc571a50589dc34989662295abe534a4e3698a545f48a5e3f6"} Dec 06 09:26:29 crc kubenswrapper[4763]: I1206 09:26:29.664256 4763 generic.go:334] "Generic (PLEG): container finished" podID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerID="fe2cd5bfebfb372f7be4af9ba7af84cef8eece3d98c4ed205189144e01ab10dc" exitCode=0 Dec 06 09:26:29 crc kubenswrapper[4763]: I1206 09:26:29.664693 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl8q9" event={"ID":"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8","Type":"ContainerDied","Data":"fe2cd5bfebfb372f7be4af9ba7af84cef8eece3d98c4ed205189144e01ab10dc"} Dec 06 09:26:29 crc kubenswrapper[4763]: I1206 09:26:29.667375 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 09:26:30 crc kubenswrapper[4763]: I1206 09:26:30.677505 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl8q9" event={"ID":"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8","Type":"ContainerStarted","Data":"f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8"} Dec 06 09:26:34 crc kubenswrapper[4763]: I1206 09:26:34.711544 4763 generic.go:334] "Generic (PLEG): container finished" podID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerID="f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8" exitCode=0 Dec 06 09:26:34 crc kubenswrapper[4763]: I1206 09:26:34.711687 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl8q9" event={"ID":"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8","Type":"ContainerDied","Data":"f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8"} Dec 06 09:26:34 crc kubenswrapper[4763]: I1206 09:26:34.720084 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:26:34 crc kubenswrapper[4763]: E1206 09:26:34.720406 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:26:35 crc kubenswrapper[4763]: I1206 09:26:35.735843 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl8q9" 
event={"ID":"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8","Type":"ContainerStarted","Data":"e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f"} Dec 06 09:26:35 crc kubenswrapper[4763]: I1206 09:26:35.754784 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vl8q9" podStartSLOduration=3.319077234 podStartE2EDuration="8.754768613s" podCreationTimestamp="2025-12-06 09:26:27 +0000 UTC" firstStartedPulling="2025-12-06 09:26:29.667074309 +0000 UTC m=+4472.242779347" lastFinishedPulling="2025-12-06 09:26:35.102765688 +0000 UTC m=+4477.678470726" observedRunningTime="2025-12-06 09:26:35.753170839 +0000 UTC m=+4478.328875887" watchObservedRunningTime="2025-12-06 09:26:35.754768613 +0000 UTC m=+4478.330473651" Dec 06 09:26:37 crc kubenswrapper[4763]: I1206 09:26:37.969082 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:37 crc kubenswrapper[4763]: I1206 09:26:37.969536 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:39 crc kubenswrapper[4763]: I1206 09:26:39.027244 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vl8q9" podUID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerName="registry-server" probeResult="failure" output=< Dec 06 09:26:39 crc kubenswrapper[4763]: timeout: failed to connect service ":50051" within 1s Dec 06 09:26:39 crc kubenswrapper[4763]: > Dec 06 09:26:46 crc kubenswrapper[4763]: I1206 09:26:46.719978 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:26:46 crc kubenswrapper[4763]: E1206 09:26:46.721691 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:26:48 crc kubenswrapper[4763]: I1206 09:26:48.207824 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:48 crc kubenswrapper[4763]: I1206 09:26:48.271011 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:48 crc kubenswrapper[4763]: I1206 09:26:48.444190 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vl8q9"] Dec 06 09:26:49 crc kubenswrapper[4763]: I1206 09:26:49.838562 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vl8q9" podUID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerName="registry-server" containerID="cri-o://e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f" gracePeriod=2 Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.424295 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.628294 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-utilities\") pod \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.628425 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-catalog-content\") pod \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.628458 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jh8kk\" (UniqueName: \"kubernetes.io/projected/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-kube-api-access-jh8kk\") pod \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\" (UID: \"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8\") " Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.629065 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-utilities" (OuterVolumeSpecName: "utilities") pod "56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" (UID: "56fef495-0c9f-44cb-a8f0-ea5bb485ddb8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.635274 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-kube-api-access-jh8kk" (OuterVolumeSpecName: "kube-api-access-jh8kk") pod "56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" (UID: "56fef495-0c9f-44cb-a8f0-ea5bb485ddb8"). InnerVolumeSpecName "kube-api-access-jh8kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.729721 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" (UID: "56fef495-0c9f-44cb-a8f0-ea5bb485ddb8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.730804 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.730837 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.730849 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jh8kk\" (UniqueName: \"kubernetes.io/projected/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8-kube-api-access-jh8kk\") on node \"crc\" DevicePath \"\"" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.850188 4763 generic.go:334] "Generic (PLEG): container finished" podID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerID="e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f" exitCode=0 Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.850242 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl8q9" event={"ID":"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8","Type":"ContainerDied","Data":"e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f"} Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.851072 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl8q9" event={"ID":"56fef495-0c9f-44cb-a8f0-ea5bb485ddb8","Type":"ContainerDied","Data":"dd6188626bb37dfc571a50589dc34989662295abe534a4e3698a545f48a5e3f6"} Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.850284 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vl8q9" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.851181 4763 scope.go:117] "RemoveContainer" containerID="e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.881447 4763 scope.go:117] "RemoveContainer" containerID="f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.892572 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vl8q9"] Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.900979 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vl8q9"] Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.913433 4763 scope.go:117] "RemoveContainer" containerID="fe2cd5bfebfb372f7be4af9ba7af84cef8eece3d98c4ed205189144e01ab10dc" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.962834 4763 scope.go:117] "RemoveContainer" containerID="e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f" Dec 06 09:26:50 crc kubenswrapper[4763]: E1206 09:26:50.963434 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f\": container with ID starting with e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f not found: ID does not exist" containerID="e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.963477 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f"} err="failed to get container status \"e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f\": rpc error: code = NotFound desc = could not find container \"e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f\": container with ID starting with e7a6e5ec4987118cd7457862967f4edf43aa486f5343b7648acf01f3b8259d1f not found: ID does not exist" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.963502 4763 scope.go:117] "RemoveContainer" containerID="f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8" Dec 06 09:26:50 crc kubenswrapper[4763]: E1206 09:26:50.963855 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8\": container with ID starting with f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8 not found: ID does not exist" containerID="f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.963882 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8"} err="failed to get container status \"f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8\": rpc error: code = NotFound desc = could not find container \"f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8\": container with ID starting with f548c5c43f7a319ca030edf07948924146976e5305b04da34cb3f3eb711619b8 not found: ID does not exist" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.963959 4763 scope.go:117] "RemoveContainer" 
containerID="fe2cd5bfebfb372f7be4af9ba7af84cef8eece3d98c4ed205189144e01ab10dc" Dec 06 09:26:50 crc kubenswrapper[4763]: E1206 09:26:50.964216 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe2cd5bfebfb372f7be4af9ba7af84cef8eece3d98c4ed205189144e01ab10dc\": container with ID starting with fe2cd5bfebfb372f7be4af9ba7af84cef8eece3d98c4ed205189144e01ab10dc not found: ID does not exist" containerID="fe2cd5bfebfb372f7be4af9ba7af84cef8eece3d98c4ed205189144e01ab10dc" Dec 06 09:26:50 crc kubenswrapper[4763]: I1206 09:26:50.964253 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe2cd5bfebfb372f7be4af9ba7af84cef8eece3d98c4ed205189144e01ab10dc"} err="failed to get container status \"fe2cd5bfebfb372f7be4af9ba7af84cef8eece3d98c4ed205189144e01ab10dc\": rpc error: code = NotFound desc = could not find container \"fe2cd5bfebfb372f7be4af9ba7af84cef8eece3d98c4ed205189144e01ab10dc\": container with ID starting with fe2cd5bfebfb372f7be4af9ba7af84cef8eece3d98c4ed205189144e01ab10dc not found: ID does not exist" Dec 06 09:26:51 crc kubenswrapper[4763]: I1206 09:26:51.730024 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" path="/var/lib/kubelet/pods/56fef495-0c9f-44cb-a8f0-ea5bb485ddb8/volumes" Dec 06 09:27:01 crc kubenswrapper[4763]: I1206 09:27:01.721444 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:27:01 crc kubenswrapper[4763]: E1206 09:27:01.722863 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:27:13 crc kubenswrapper[4763]: I1206 09:27:13.719957 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:27:13 crc kubenswrapper[4763]: E1206 09:27:13.720919 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.725715 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-746mn"] Dec 06 09:27:16 crc kubenswrapper[4763]: E1206 09:27:16.727620 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerName="extract-content" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.727641 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerName="extract-content" Dec 06 09:27:16 crc kubenswrapper[4763]: E1206 09:27:16.727658 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerName="extract-utilities" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 
09:27:16.727664 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerName="extract-utilities" Dec 06 09:27:16 crc kubenswrapper[4763]: E1206 09:27:16.727685 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerName="registry-server" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.727692 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerName="registry-server" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.727963 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="56fef495-0c9f-44cb-a8f0-ea5bb485ddb8" containerName="registry-server" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.729544 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.738535 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-746mn"] Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.865962 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b55x9\" (UniqueName: \"kubernetes.io/projected/00adf0e3-47d9-424e-8099-f0bed8a63873-kube-api-access-b55x9\") pod \"community-operators-746mn\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.866019 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-catalog-content\") pod \"community-operators-746mn\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.866053 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-utilities\") pod \"community-operators-746mn\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.968058 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-utilities\") pod \"community-operators-746mn\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.968315 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b55x9\" (UniqueName: \"kubernetes.io/projected/00adf0e3-47d9-424e-8099-f0bed8a63873-kube-api-access-b55x9\") pod \"community-operators-746mn\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.968379 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-catalog-content\") pod \"community-operators-746mn\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " pod="openshift-marketplace/community-operators-746mn" 
Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.968719 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-utilities\") pod \"community-operators-746mn\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.968888 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-catalog-content\") pod \"community-operators-746mn\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:16 crc kubenswrapper[4763]: I1206 09:27:16.992142 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b55x9\" (UniqueName: \"kubernetes.io/projected/00adf0e3-47d9-424e-8099-f0bed8a63873-kube-api-access-b55x9\") pod \"community-operators-746mn\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:17 crc kubenswrapper[4763]: I1206 09:27:17.104879 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:17 crc kubenswrapper[4763]: I1206 09:27:17.746107 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-746mn"] Dec 06 09:27:19 crc kubenswrapper[4763]: I1206 09:27:19.098343 4763 generic.go:334] "Generic (PLEG): container finished" podID="00adf0e3-47d9-424e-8099-f0bed8a63873" containerID="4c0d098608999f4507c764c8f440ee823ff98ac582ff98f8dd6b4681f262418c" exitCode=0 Dec 06 09:27:19 crc kubenswrapper[4763]: I1206 09:27:19.098421 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-746mn" event={"ID":"00adf0e3-47d9-424e-8099-f0bed8a63873","Type":"ContainerDied","Data":"4c0d098608999f4507c764c8f440ee823ff98ac582ff98f8dd6b4681f262418c"} Dec 06 09:27:19 crc kubenswrapper[4763]: I1206 09:27:19.098796 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-746mn" event={"ID":"00adf0e3-47d9-424e-8099-f0bed8a63873","Type":"ContainerStarted","Data":"fb3502697e4489a3b11a07e1436ab791b3b39f67bf5a1ab4760827921decd36c"} Dec 06 09:27:21 crc kubenswrapper[4763]: I1206 09:27:21.123637 4763 generic.go:334] "Generic (PLEG): container finished" podID="00adf0e3-47d9-424e-8099-f0bed8a63873" containerID="d0a4c1c39d2b4e9e516b91ff66dea14a9c7d0ab499a83d9507982a4c91c0a162" exitCode=0 Dec 06 09:27:21 crc kubenswrapper[4763]: I1206 09:27:21.123730 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-746mn" event={"ID":"00adf0e3-47d9-424e-8099-f0bed8a63873","Type":"ContainerDied","Data":"d0a4c1c39d2b4e9e516b91ff66dea14a9c7d0ab499a83d9507982a4c91c0a162"} Dec 06 09:27:22 crc kubenswrapper[4763]: I1206 09:27:22.134534 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-746mn" event={"ID":"00adf0e3-47d9-424e-8099-f0bed8a63873","Type":"ContainerStarted","Data":"eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39"} Dec 06 09:27:22 crc kubenswrapper[4763]: I1206 09:27:22.150121 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-746mn" 
podStartSLOduration=3.7101925529999997 podStartE2EDuration="6.150098563s" podCreationTimestamp="2025-12-06 09:27:16 +0000 UTC" firstStartedPulling="2025-12-06 09:27:19.100826584 +0000 UTC m=+4521.676531622" lastFinishedPulling="2025-12-06 09:27:21.540732594 +0000 UTC m=+4524.116437632" observedRunningTime="2025-12-06 09:27:22.149693792 +0000 UTC m=+4524.725398860" watchObservedRunningTime="2025-12-06 09:27:22.150098563 +0000 UTC m=+4524.725803601" Dec 06 09:27:27 crc kubenswrapper[4763]: I1206 09:27:27.105951 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:27 crc kubenswrapper[4763]: I1206 09:27:27.106501 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:27 crc kubenswrapper[4763]: I1206 09:27:27.153892 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:27 crc kubenswrapper[4763]: I1206 09:27:27.232648 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:27 crc kubenswrapper[4763]: I1206 09:27:27.392055 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-746mn"] Dec 06 09:27:27 crc kubenswrapper[4763]: I1206 09:27:27.749627 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:27:27 crc kubenswrapper[4763]: E1206 09:27:27.749957 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:27:29 crc kubenswrapper[4763]: I1206 09:27:29.202291 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-746mn" podUID="00adf0e3-47d9-424e-8099-f0bed8a63873" containerName="registry-server" containerID="cri-o://eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39" gracePeriod=2 Dec 06 09:27:29 crc kubenswrapper[4763]: I1206 09:27:29.674968 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:29 crc kubenswrapper[4763]: I1206 09:27:29.740444 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-utilities\") pod \"00adf0e3-47d9-424e-8099-f0bed8a63873\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " Dec 06 09:27:29 crc kubenswrapper[4763]: I1206 09:27:29.740929 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-catalog-content\") pod \"00adf0e3-47d9-424e-8099-f0bed8a63873\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " Dec 06 09:27:29 crc kubenswrapper[4763]: I1206 09:27:29.741139 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b55x9\" (UniqueName: \"kubernetes.io/projected/00adf0e3-47d9-424e-8099-f0bed8a63873-kube-api-access-b55x9\") pod \"00adf0e3-47d9-424e-8099-f0bed8a63873\" (UID: \"00adf0e3-47d9-424e-8099-f0bed8a63873\") " Dec 06 09:27:29 crc kubenswrapper[4763]: I1206 09:27:29.741501 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-utilities" (OuterVolumeSpecName: "utilities") pod "00adf0e3-47d9-424e-8099-f0bed8a63873" (UID: "00adf0e3-47d9-424e-8099-f0bed8a63873"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:27:29 crc kubenswrapper[4763]: I1206 09:27:29.741892 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:27:29 crc kubenswrapper[4763]: I1206 09:27:29.750551 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00adf0e3-47d9-424e-8099-f0bed8a63873-kube-api-access-b55x9" (OuterVolumeSpecName: "kube-api-access-b55x9") pod "00adf0e3-47d9-424e-8099-f0bed8a63873" (UID: "00adf0e3-47d9-424e-8099-f0bed8a63873"). InnerVolumeSpecName "kube-api-access-b55x9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:27:29 crc kubenswrapper[4763]: I1206 09:27:29.798805 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "00adf0e3-47d9-424e-8099-f0bed8a63873" (UID: "00adf0e3-47d9-424e-8099-f0bed8a63873"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:27:29 crc kubenswrapper[4763]: I1206 09:27:29.844678 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00adf0e3-47d9-424e-8099-f0bed8a63873-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:27:29 crc kubenswrapper[4763]: I1206 09:27:29.845148 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b55x9\" (UniqueName: \"kubernetes.io/projected/00adf0e3-47d9-424e-8099-f0bed8a63873-kube-api-access-b55x9\") on node \"crc\" DevicePath \"\"" Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.217423 4763 generic.go:334] "Generic (PLEG): container finished" podID="00adf0e3-47d9-424e-8099-f0bed8a63873" containerID="eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39" exitCode=0 Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.217487 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-746mn" event={"ID":"00adf0e3-47d9-424e-8099-f0bed8a63873","Type":"ContainerDied","Data":"eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39"} Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.217533 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-746mn" Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.218759 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-746mn" event={"ID":"00adf0e3-47d9-424e-8099-f0bed8a63873","Type":"ContainerDied","Data":"fb3502697e4489a3b11a07e1436ab791b3b39f67bf5a1ab4760827921decd36c"} Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.218850 4763 scope.go:117] "RemoveContainer" containerID="eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39" Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.242583 4763 scope.go:117] "RemoveContainer" containerID="d0a4c1c39d2b4e9e516b91ff66dea14a9c7d0ab499a83d9507982a4c91c0a162" Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.262890 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-746mn"] Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.272360 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-746mn"] Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.277566 4763 scope.go:117] "RemoveContainer" containerID="4c0d098608999f4507c764c8f440ee823ff98ac582ff98f8dd6b4681f262418c" Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.327716 4763 scope.go:117] "RemoveContainer" containerID="eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39" Dec 06 09:27:30 crc kubenswrapper[4763]: E1206 09:27:30.328258 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39\": container with ID starting with eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39 not found: ID does not exist" containerID="eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39" Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.328294 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39"} err="failed to get container status 
\"eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39\": rpc error: code = NotFound desc = could not find container \"eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39\": container with ID starting with eef4ab7750336daacdbeab03c2f26d0477138e6400126ac84208bd031d7f7a39 not found: ID does not exist" Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.328322 4763 scope.go:117] "RemoveContainer" containerID="d0a4c1c39d2b4e9e516b91ff66dea14a9c7d0ab499a83d9507982a4c91c0a162" Dec 06 09:27:30 crc kubenswrapper[4763]: E1206 09:27:30.328700 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0a4c1c39d2b4e9e516b91ff66dea14a9c7d0ab499a83d9507982a4c91c0a162\": container with ID starting with d0a4c1c39d2b4e9e516b91ff66dea14a9c7d0ab499a83d9507982a4c91c0a162 not found: ID does not exist" containerID="d0a4c1c39d2b4e9e516b91ff66dea14a9c7d0ab499a83d9507982a4c91c0a162" Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.328756 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0a4c1c39d2b4e9e516b91ff66dea14a9c7d0ab499a83d9507982a4c91c0a162"} err="failed to get container status \"d0a4c1c39d2b4e9e516b91ff66dea14a9c7d0ab499a83d9507982a4c91c0a162\": rpc error: code = NotFound desc = could not find container \"d0a4c1c39d2b4e9e516b91ff66dea14a9c7d0ab499a83d9507982a4c91c0a162\": container with ID starting with d0a4c1c39d2b4e9e516b91ff66dea14a9c7d0ab499a83d9507982a4c91c0a162 not found: ID does not exist" Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.328788 4763 scope.go:117] "RemoveContainer" containerID="4c0d098608999f4507c764c8f440ee823ff98ac582ff98f8dd6b4681f262418c" Dec 06 09:27:30 crc kubenswrapper[4763]: E1206 09:27:30.329119 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c0d098608999f4507c764c8f440ee823ff98ac582ff98f8dd6b4681f262418c\": container with ID starting with 4c0d098608999f4507c764c8f440ee823ff98ac582ff98f8dd6b4681f262418c not found: ID does not exist" containerID="4c0d098608999f4507c764c8f440ee823ff98ac582ff98f8dd6b4681f262418c" Dec 06 09:27:30 crc kubenswrapper[4763]: I1206 09:27:30.329171 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c0d098608999f4507c764c8f440ee823ff98ac582ff98f8dd6b4681f262418c"} err="failed to get container status \"4c0d098608999f4507c764c8f440ee823ff98ac582ff98f8dd6b4681f262418c\": rpc error: code = NotFound desc = could not find container \"4c0d098608999f4507c764c8f440ee823ff98ac582ff98f8dd6b4681f262418c\": container with ID starting with 4c0d098608999f4507c764c8f440ee823ff98ac582ff98f8dd6b4681f262418c not found: ID does not exist" Dec 06 09:27:31 crc kubenswrapper[4763]: I1206 09:27:31.730223 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00adf0e3-47d9-424e-8099-f0bed8a63873" path="/var/lib/kubelet/pods/00adf0e3-47d9-424e-8099-f0bed8a63873/volumes" Dec 06 09:27:38 crc kubenswrapper[4763]: I1206 09:27:38.719767 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:27:38 crc kubenswrapper[4763]: E1206 09:27:38.720934 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:27:53 crc kubenswrapper[4763]: I1206 09:27:53.722833 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:27:53 crc kubenswrapper[4763]: E1206 09:27:53.724479 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:28:04 crc kubenswrapper[4763]: I1206 09:28:04.720098 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:28:04 crc kubenswrapper[4763]: E1206 09:28:04.720891 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:28:17 crc kubenswrapper[4763]: I1206 09:28:17.727183 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:28:17 crc kubenswrapper[4763]: E1206 09:28:17.728052 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:28:32 crc kubenswrapper[4763]: I1206 09:28:32.720576 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:28:32 crc kubenswrapper[4763]: E1206 09:28:32.721484 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:28:45 crc kubenswrapper[4763]: I1206 09:28:45.719922 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:28:45 crc kubenswrapper[4763]: E1206 09:28:45.720891 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" 
podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:28:56 crc kubenswrapper[4763]: I1206 09:28:56.719883 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:28:56 crc kubenswrapper[4763]: E1206 09:28:56.720640 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:29:10 crc kubenswrapper[4763]: I1206 09:29:10.720323 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:29:10 crc kubenswrapper[4763]: E1206 09:29:10.722092 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:29:23 crc kubenswrapper[4763]: I1206 09:29:23.721160 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:29:24 crc kubenswrapper[4763]: I1206 09:29:24.216091 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"0833aaaf4268f1282ce4e762ac5e42bf0c3fc97bb43a56b1c3f20a1163f9b272"} Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.144857 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp"] Dec 06 09:30:00 crc kubenswrapper[4763]: E1206 09:30:00.148138 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00adf0e3-47d9-424e-8099-f0bed8a63873" containerName="extract-utilities" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.148233 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="00adf0e3-47d9-424e-8099-f0bed8a63873" containerName="extract-utilities" Dec 06 09:30:00 crc kubenswrapper[4763]: E1206 09:30:00.148301 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00adf0e3-47d9-424e-8099-f0bed8a63873" containerName="extract-content" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.148360 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="00adf0e3-47d9-424e-8099-f0bed8a63873" containerName="extract-content" Dec 06 09:30:00 crc kubenswrapper[4763]: E1206 09:30:00.148427 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00adf0e3-47d9-424e-8099-f0bed8a63873" containerName="registry-server" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.148480 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="00adf0e3-47d9-424e-8099-f0bed8a63873" containerName="registry-server" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.148720 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="00adf0e3-47d9-424e-8099-f0bed8a63873" containerName="registry-server" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 
09:30:00.149509 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.151745 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.151979 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.155983 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp"] Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.251149 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2408031a-7cf2-4f70-9255-686ebfce6329-secret-volume\") pod \"collect-profiles-29416890-b9kqp\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.251214 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tclm\" (UniqueName: \"kubernetes.io/projected/2408031a-7cf2-4f70-9255-686ebfce6329-kube-api-access-9tclm\") pod \"collect-profiles-29416890-b9kqp\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.251356 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2408031a-7cf2-4f70-9255-686ebfce6329-config-volume\") pod \"collect-profiles-29416890-b9kqp\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.353273 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2408031a-7cf2-4f70-9255-686ebfce6329-config-volume\") pod \"collect-profiles-29416890-b9kqp\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.353411 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2408031a-7cf2-4f70-9255-686ebfce6329-secret-volume\") pod \"collect-profiles-29416890-b9kqp\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.353451 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tclm\" (UniqueName: \"kubernetes.io/projected/2408031a-7cf2-4f70-9255-686ebfce6329-kube-api-access-9tclm\") pod \"collect-profiles-29416890-b9kqp\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.354660 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/2408031a-7cf2-4f70-9255-686ebfce6329-config-volume\") pod \"collect-profiles-29416890-b9kqp\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.361463 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2408031a-7cf2-4f70-9255-686ebfce6329-secret-volume\") pod \"collect-profiles-29416890-b9kqp\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.370697 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tclm\" (UniqueName: \"kubernetes.io/projected/2408031a-7cf2-4f70-9255-686ebfce6329-kube-api-access-9tclm\") pod \"collect-profiles-29416890-b9kqp\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.471460 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:00 crc kubenswrapper[4763]: I1206 09:30:00.960095 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp"] Dec 06 09:30:01 crc kubenswrapper[4763]: I1206 09:30:01.576150 4763 generic.go:334] "Generic (PLEG): container finished" podID="2408031a-7cf2-4f70-9255-686ebfce6329" containerID="7fc52f70cc0077ca32b3a1a3048ca3fdad75d056a84b92c06e81a0960d13d4f8" exitCode=0 Dec 06 09:30:01 crc kubenswrapper[4763]: I1206 09:30:01.576219 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" event={"ID":"2408031a-7cf2-4f70-9255-686ebfce6329","Type":"ContainerDied","Data":"7fc52f70cc0077ca32b3a1a3048ca3fdad75d056a84b92c06e81a0960d13d4f8"} Dec 06 09:30:01 crc kubenswrapper[4763]: I1206 09:30:01.576447 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" event={"ID":"2408031a-7cf2-4f70-9255-686ebfce6329","Type":"ContainerStarted","Data":"8e1a66d15594c1d2bfd7681f40b8b24ab6eccb818770c3104b713fc807873591"} Dec 06 09:30:02 crc kubenswrapper[4763]: I1206 09:30:02.931050 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.031230 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2408031a-7cf2-4f70-9255-686ebfce6329-config-volume\") pod \"2408031a-7cf2-4f70-9255-686ebfce6329\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.031294 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2408031a-7cf2-4f70-9255-686ebfce6329-secret-volume\") pod \"2408031a-7cf2-4f70-9255-686ebfce6329\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.031330 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tclm\" (UniqueName: \"kubernetes.io/projected/2408031a-7cf2-4f70-9255-686ebfce6329-kube-api-access-9tclm\") pod \"2408031a-7cf2-4f70-9255-686ebfce6329\" (UID: \"2408031a-7cf2-4f70-9255-686ebfce6329\") " Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.032225 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2408031a-7cf2-4f70-9255-686ebfce6329-config-volume" (OuterVolumeSpecName: "config-volume") pod "2408031a-7cf2-4f70-9255-686ebfce6329" (UID: "2408031a-7cf2-4f70-9255-686ebfce6329"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.045139 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2408031a-7cf2-4f70-9255-686ebfce6329-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2408031a-7cf2-4f70-9255-686ebfce6329" (UID: "2408031a-7cf2-4f70-9255-686ebfce6329"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.045222 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2408031a-7cf2-4f70-9255-686ebfce6329-kube-api-access-9tclm" (OuterVolumeSpecName: "kube-api-access-9tclm") pod "2408031a-7cf2-4f70-9255-686ebfce6329" (UID: "2408031a-7cf2-4f70-9255-686ebfce6329"). InnerVolumeSpecName "kube-api-access-9tclm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.134670 4763 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2408031a-7cf2-4f70-9255-686ebfce6329-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.134725 4763 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2408031a-7cf2-4f70-9255-686ebfce6329-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.134737 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tclm\" (UniqueName: \"kubernetes.io/projected/2408031a-7cf2-4f70-9255-686ebfce6329-kube-api-access-9tclm\") on node \"crc\" DevicePath \"\"" Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.596062 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" event={"ID":"2408031a-7cf2-4f70-9255-686ebfce6329","Type":"ContainerDied","Data":"8e1a66d15594c1d2bfd7681f40b8b24ab6eccb818770c3104b713fc807873591"} Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.596110 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e1a66d15594c1d2bfd7681f40b8b24ab6eccb818770c3104b713fc807873591" Dec 06 09:30:03 crc kubenswrapper[4763]: I1206 09:30:03.596226 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416890-b9kqp" Dec 06 09:30:04 crc kubenswrapper[4763]: I1206 09:30:04.004289 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8"] Dec 06 09:30:04 crc kubenswrapper[4763]: I1206 09:30:04.013142 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416845-l4lg8"] Dec 06 09:30:05 crc kubenswrapper[4763]: I1206 09:30:05.729777 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1696c5a6-4a1e-454c-bc65-99f6b7cfbe67" path="/var/lib/kubelet/pods/1696c5a6-4a1e-454c-bc65-99f6b7cfbe67/volumes" Dec 06 09:30:24 crc kubenswrapper[4763]: I1206 09:30:24.887405 4763 scope.go:117] "RemoveContainer" containerID="7aa366474eb624c4022bb1569368528ec6b1e6225ab12080e3bb2a24fe495d50" Dec 06 09:30:31 crc kubenswrapper[4763]: I1206 09:30:31.765955 4763 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="de1b0280-c39f-4e3d-98b9-cdbb0085e6e1" containerName="galera" probeResult="failure" output="command timed out" Dec 06 09:30:31 crc kubenswrapper[4763]: I1206 09:30:31.766649 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-galera-0" podUID="de1b0280-c39f-4e3d-98b9-cdbb0085e6e1" containerName="galera" probeResult="failure" output="command timed out" Dec 06 09:31:42 crc kubenswrapper[4763]: I1206 09:31:42.537338 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:31:42 crc kubenswrapper[4763]: I1206 09:31:42.537886 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" 
podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:32:12 crc kubenswrapper[4763]: I1206 09:32:12.536952 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:32:12 crc kubenswrapper[4763]: I1206 09:32:12.537495 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:32:42 crc kubenswrapper[4763]: I1206 09:32:42.536496 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:32:42 crc kubenswrapper[4763]: I1206 09:32:42.537096 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:32:42 crc kubenswrapper[4763]: I1206 09:32:42.537152 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 09:32:42 crc kubenswrapper[4763]: I1206 09:32:42.538023 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0833aaaf4268f1282ce4e762ac5e42bf0c3fc97bb43a56b1c3f20a1163f9b272"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 09:32:42 crc kubenswrapper[4763]: I1206 09:32:42.538079 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://0833aaaf4268f1282ce4e762ac5e42bf0c3fc97bb43a56b1c3f20a1163f9b272" gracePeriod=600 Dec 06 09:32:43 crc kubenswrapper[4763]: I1206 09:32:43.091468 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="0833aaaf4268f1282ce4e762ac5e42bf0c3fc97bb43a56b1c3f20a1163f9b272" exitCode=0 Dec 06 09:32:43 crc kubenswrapper[4763]: I1206 09:32:43.091627 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"0833aaaf4268f1282ce4e762ac5e42bf0c3fc97bb43a56b1c3f20a1163f9b272"} Dec 06 09:32:43 crc kubenswrapper[4763]: I1206 09:32:43.091803 4763 scope.go:117] "RemoveContainer" containerID="e42d8b55b604960f74117d88f38bb5004f9abd941f56457b8e6baa236c0b2edd" Dec 06 09:32:44 crc kubenswrapper[4763]: I1206 
09:32:44.104333 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13"} Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.379099 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rp8g6"] Dec 06 09:33:14 crc kubenswrapper[4763]: E1206 09:33:14.380038 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2408031a-7cf2-4f70-9255-686ebfce6329" containerName="collect-profiles" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.380054 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="2408031a-7cf2-4f70-9255-686ebfce6329" containerName="collect-profiles" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.380312 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="2408031a-7cf2-4f70-9255-686ebfce6329" containerName="collect-profiles" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.381897 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.406499 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rp8g6"] Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.522415 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcfrj\" (UniqueName: \"kubernetes.io/projected/5217665e-c670-4736-b153-edf241c104fc-kube-api-access-fcfrj\") pod \"certified-operators-rp8g6\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.522491 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-utilities\") pod \"certified-operators-rp8g6\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.522525 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-catalog-content\") pod \"certified-operators-rp8g6\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.625798 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcfrj\" (UniqueName: \"kubernetes.io/projected/5217665e-c670-4736-b153-edf241c104fc-kube-api-access-fcfrj\") pod \"certified-operators-rp8g6\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.625862 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-utilities\") pod \"certified-operators-rp8g6\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.625888 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-catalog-content\") pod \"certified-operators-rp8g6\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.626464 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-catalog-content\") pod \"certified-operators-rp8g6\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.626484 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-utilities\") pod \"certified-operators-rp8g6\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:14 crc kubenswrapper[4763]: I1206 09:33:14.873007 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcfrj\" (UniqueName: \"kubernetes.io/projected/5217665e-c670-4736-b153-edf241c104fc-kube-api-access-fcfrj\") pod \"certified-operators-rp8g6\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:15 crc kubenswrapper[4763]: I1206 09:33:15.005421 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:15 crc kubenswrapper[4763]: I1206 09:33:15.480722 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rp8g6"] Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.427129 4763 generic.go:334] "Generic (PLEG): container finished" podID="5217665e-c670-4736-b153-edf241c104fc" containerID="24a182d56813aae3d26a181f017a16510cabe295d14f24f713cfda11356399e0" exitCode=0 Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.427203 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp8g6" event={"ID":"5217665e-c670-4736-b153-edf241c104fc","Type":"ContainerDied","Data":"24a182d56813aae3d26a181f017a16510cabe295d14f24f713cfda11356399e0"} Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.427518 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp8g6" event={"ID":"5217665e-c670-4736-b153-edf241c104fc","Type":"ContainerStarted","Data":"6c06b44bd7f44a2dabd68442e189585c5d3ae04fa684b94251d49f171baf6af6"} Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.429118 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.770157 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6gx8c"] Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.772884 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.782360 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6gx8c"] Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.876083 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-catalog-content\") pod \"redhat-marketplace-6gx8c\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.876431 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmj6x\" (UniqueName: \"kubernetes.io/projected/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-kube-api-access-dmj6x\") pod \"redhat-marketplace-6gx8c\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.876678 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-utilities\") pod \"redhat-marketplace-6gx8c\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.979187 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-utilities\") pod \"redhat-marketplace-6gx8c\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.979292 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-catalog-content\") pod \"redhat-marketplace-6gx8c\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.979405 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmj6x\" (UniqueName: \"kubernetes.io/projected/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-kube-api-access-dmj6x\") pod \"redhat-marketplace-6gx8c\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.979713 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-utilities\") pod \"redhat-marketplace-6gx8c\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:16 crc kubenswrapper[4763]: I1206 09:33:16.979824 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-catalog-content\") pod \"redhat-marketplace-6gx8c\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:17 crc kubenswrapper[4763]: I1206 09:33:16.999980 4763 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dmj6x\" (UniqueName: \"kubernetes.io/projected/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-kube-api-access-dmj6x\") pod \"redhat-marketplace-6gx8c\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:17 crc kubenswrapper[4763]: I1206 09:33:17.097668 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:17 crc kubenswrapper[4763]: I1206 09:33:17.441157 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp8g6" event={"ID":"5217665e-c670-4736-b153-edf241c104fc","Type":"ContainerStarted","Data":"526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824"} Dec 06 09:33:17 crc kubenswrapper[4763]: I1206 09:33:17.639695 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6gx8c"] Dec 06 09:33:18 crc kubenswrapper[4763]: I1206 09:33:18.450611 4763 generic.go:334] "Generic (PLEG): container finished" podID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" containerID="cf53bffbce3448db2c6b24821aa2c987365add058221bf9197fdff74c97e6017" exitCode=0 Dec 06 09:33:18 crc kubenswrapper[4763]: I1206 09:33:18.450767 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6gx8c" event={"ID":"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8","Type":"ContainerDied","Data":"cf53bffbce3448db2c6b24821aa2c987365add058221bf9197fdff74c97e6017"} Dec 06 09:33:18 crc kubenswrapper[4763]: I1206 09:33:18.450982 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6gx8c" event={"ID":"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8","Type":"ContainerStarted","Data":"be4629a0aa6d674d38210714d0a87cbc18f9345d88a908ef74ac1bca5687fc00"} Dec 06 09:33:18 crc kubenswrapper[4763]: I1206 09:33:18.453855 4763 generic.go:334] "Generic (PLEG): container finished" podID="5217665e-c670-4736-b153-edf241c104fc" containerID="526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824" exitCode=0 Dec 06 09:33:18 crc kubenswrapper[4763]: I1206 09:33:18.453884 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp8g6" event={"ID":"5217665e-c670-4736-b153-edf241c104fc","Type":"ContainerDied","Data":"526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824"} Dec 06 09:33:19 crc kubenswrapper[4763]: I1206 09:33:19.465374 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6gx8c" event={"ID":"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8","Type":"ContainerStarted","Data":"244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a"} Dec 06 09:33:19 crc kubenswrapper[4763]: I1206 09:33:19.467583 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp8g6" event={"ID":"5217665e-c670-4736-b153-edf241c104fc","Type":"ContainerStarted","Data":"8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331"} Dec 06 09:33:19 crc kubenswrapper[4763]: I1206 09:33:19.508412 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rp8g6" podStartSLOduration=3.068792436 podStartE2EDuration="5.508380069s" podCreationTimestamp="2025-12-06 09:33:14 +0000 UTC" firstStartedPulling="2025-12-06 09:33:16.428845468 +0000 UTC m=+4879.004550506" lastFinishedPulling="2025-12-06 09:33:18.868433101 +0000 UTC 
m=+4881.444138139" observedRunningTime="2025-12-06 09:33:19.504996017 +0000 UTC m=+4882.080701075" watchObservedRunningTime="2025-12-06 09:33:19.508380069 +0000 UTC m=+4882.084085117" Dec 06 09:33:20 crc kubenswrapper[4763]: I1206 09:33:20.481849 4763 generic.go:334] "Generic (PLEG): container finished" podID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" containerID="244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a" exitCode=0 Dec 06 09:33:20 crc kubenswrapper[4763]: I1206 09:33:20.482415 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6gx8c" event={"ID":"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8","Type":"ContainerDied","Data":"244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a"} Dec 06 09:33:21 crc kubenswrapper[4763]: I1206 09:33:21.504641 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6gx8c" event={"ID":"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8","Type":"ContainerStarted","Data":"ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0"} Dec 06 09:33:21 crc kubenswrapper[4763]: I1206 09:33:21.540852 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6gx8c" podStartSLOduration=2.997566053 podStartE2EDuration="5.540835067s" podCreationTimestamp="2025-12-06 09:33:16 +0000 UTC" firstStartedPulling="2025-12-06 09:33:18.452490319 +0000 UTC m=+4881.028195357" lastFinishedPulling="2025-12-06 09:33:20.995759333 +0000 UTC m=+4883.571464371" observedRunningTime="2025-12-06 09:33:21.536223852 +0000 UTC m=+4884.111928890" watchObservedRunningTime="2025-12-06 09:33:21.540835067 +0000 UTC m=+4884.116540105" Dec 06 09:33:25 crc kubenswrapper[4763]: I1206 09:33:25.006805 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:25 crc kubenswrapper[4763]: I1206 09:33:25.008329 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:25 crc kubenswrapper[4763]: I1206 09:33:25.051685 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:25 crc kubenswrapper[4763]: I1206 09:33:25.596465 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:26 crc kubenswrapper[4763]: I1206 09:33:26.191677 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rp8g6"] Dec 06 09:33:27 crc kubenswrapper[4763]: I1206 09:33:27.098795 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:27 crc kubenswrapper[4763]: I1206 09:33:27.099212 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:27 crc kubenswrapper[4763]: I1206 09:33:27.143932 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:27 crc kubenswrapper[4763]: I1206 09:33:27.553298 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rp8g6" podUID="5217665e-c670-4736-b153-edf241c104fc" containerName="registry-server" 
containerID="cri-o://8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331" gracePeriod=2 Dec 06 09:33:27 crc kubenswrapper[4763]: I1206 09:33:27.610718 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.024404 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.120106 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-utilities\") pod \"5217665e-c670-4736-b153-edf241c104fc\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.120244 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcfrj\" (UniqueName: \"kubernetes.io/projected/5217665e-c670-4736-b153-edf241c104fc-kube-api-access-fcfrj\") pod \"5217665e-c670-4736-b153-edf241c104fc\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.120389 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-catalog-content\") pod \"5217665e-c670-4736-b153-edf241c104fc\" (UID: \"5217665e-c670-4736-b153-edf241c104fc\") " Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.120763 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-utilities" (OuterVolumeSpecName: "utilities") pod "5217665e-c670-4736-b153-edf241c104fc" (UID: "5217665e-c670-4736-b153-edf241c104fc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.126950 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5217665e-c670-4736-b153-edf241c104fc-kube-api-access-fcfrj" (OuterVolumeSpecName: "kube-api-access-fcfrj") pod "5217665e-c670-4736-b153-edf241c104fc" (UID: "5217665e-c670-4736-b153-edf241c104fc"). InnerVolumeSpecName "kube-api-access-fcfrj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.190183 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5217665e-c670-4736-b153-edf241c104fc" (UID: "5217665e-c670-4736-b153-edf241c104fc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.224245 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcfrj\" (UniqueName: \"kubernetes.io/projected/5217665e-c670-4736-b153-edf241c104fc-kube-api-access-fcfrj\") on node \"crc\" DevicePath \"\"" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.224314 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.224326 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5217665e-c670-4736-b153-edf241c104fc-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.563542 4763 generic.go:334] "Generic (PLEG): container finished" podID="5217665e-c670-4736-b153-edf241c104fc" containerID="8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331" exitCode=0 Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.563598 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp8g6" event={"ID":"5217665e-c670-4736-b153-edf241c104fc","Type":"ContainerDied","Data":"8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331"} Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.563640 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rp8g6" event={"ID":"5217665e-c670-4736-b153-edf241c104fc","Type":"ContainerDied","Data":"6c06b44bd7f44a2dabd68442e189585c5d3ae04fa684b94251d49f171baf6af6"} Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.563662 4763 scope.go:117] "RemoveContainer" containerID="8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.564591 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rp8g6" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.600096 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rp8g6"] Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.600474 4763 scope.go:117] "RemoveContainer" containerID="526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.610049 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rp8g6"] Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.621541 4763 scope.go:117] "RemoveContainer" containerID="24a182d56813aae3d26a181f017a16510cabe295d14f24f713cfda11356399e0" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.668597 4763 scope.go:117] "RemoveContainer" containerID="8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331" Dec 06 09:33:28 crc kubenswrapper[4763]: E1206 09:33:28.669172 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331\": container with ID starting with 8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331 not found: ID does not exist" containerID="8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.669243 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331"} err="failed to get container status \"8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331\": rpc error: code = NotFound desc = could not find container \"8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331\": container with ID starting with 8426f8142b44c924b01a350d30ac9b97a9a932a794901a2b0e9ef3146ef02331 not found: ID does not exist" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.669285 4763 scope.go:117] "RemoveContainer" containerID="526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824" Dec 06 09:33:28 crc kubenswrapper[4763]: E1206 09:33:28.669709 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824\": container with ID starting with 526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824 not found: ID does not exist" containerID="526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.669794 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824"} err="failed to get container status \"526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824\": rpc error: code = NotFound desc = could not find container \"526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824\": container with ID starting with 526ab3bea4f2c7e3fdfe5d4406f94f4f082a0c5b897399dd6e17636e25061824 not found: ID does not exist" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.669872 4763 scope.go:117] "RemoveContainer" containerID="24a182d56813aae3d26a181f017a16510cabe295d14f24f713cfda11356399e0" Dec 06 09:33:28 crc kubenswrapper[4763]: E1206 09:33:28.670194 4763 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"24a182d56813aae3d26a181f017a16510cabe295d14f24f713cfda11356399e0\": container with ID starting with 24a182d56813aae3d26a181f017a16510cabe295d14f24f713cfda11356399e0 not found: ID does not exist" containerID="24a182d56813aae3d26a181f017a16510cabe295d14f24f713cfda11356399e0" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.670221 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24a182d56813aae3d26a181f017a16510cabe295d14f24f713cfda11356399e0"} err="failed to get container status \"24a182d56813aae3d26a181f017a16510cabe295d14f24f713cfda11356399e0\": rpc error: code = NotFound desc = could not find container \"24a182d56813aae3d26a181f017a16510cabe295d14f24f713cfda11356399e0\": container with ID starting with 24a182d56813aae3d26a181f017a16510cabe295d14f24f713cfda11356399e0 not found: ID does not exist" Dec 06 09:33:28 crc kubenswrapper[4763]: I1206 09:33:28.963489 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6gx8c"] Dec 06 09:33:29 crc kubenswrapper[4763]: I1206 09:33:29.736586 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5217665e-c670-4736-b153-edf241c104fc" path="/var/lib/kubelet/pods/5217665e-c670-4736-b153-edf241c104fc/volumes" Dec 06 09:33:30 crc kubenswrapper[4763]: I1206 09:33:30.581096 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6gx8c" podUID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" containerName="registry-server" containerID="cri-o://ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0" gracePeriod=2 Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.092008 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.182209 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-utilities\") pod \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.182399 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmj6x\" (UniqueName: \"kubernetes.io/projected/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-kube-api-access-dmj6x\") pod \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.182442 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-catalog-content\") pod \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\" (UID: \"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8\") " Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.182840 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-utilities" (OuterVolumeSpecName: "utilities") pod "a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" (UID: "a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.183056 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.188544 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-kube-api-access-dmj6x" (OuterVolumeSpecName: "kube-api-access-dmj6x") pod "a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" (UID: "a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8"). InnerVolumeSpecName "kube-api-access-dmj6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.200563 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" (UID: "a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.284866 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmj6x\" (UniqueName: \"kubernetes.io/projected/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-kube-api-access-dmj6x\") on node \"crc\" DevicePath \"\"" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.284931 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.592692 4763 generic.go:334] "Generic (PLEG): container finished" podID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" containerID="ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0" exitCode=0 Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.592736 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6gx8c" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.592744 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6gx8c" event={"ID":"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8","Type":"ContainerDied","Data":"ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0"} Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.592783 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6gx8c" event={"ID":"a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8","Type":"ContainerDied","Data":"be4629a0aa6d674d38210714d0a87cbc18f9345d88a908ef74ac1bca5687fc00"} Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.592806 4763 scope.go:117] "RemoveContainer" containerID="ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.621771 4763 scope.go:117] "RemoveContainer" containerID="244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.629084 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6gx8c"] Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.644217 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6gx8c"] Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.648487 4763 scope.go:117] "RemoveContainer" containerID="cf53bffbce3448db2c6b24821aa2c987365add058221bf9197fdff74c97e6017" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.687812 4763 scope.go:117] "RemoveContainer" containerID="ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0" Dec 06 09:33:31 crc kubenswrapper[4763]: E1206 09:33:31.688428 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0\": container with ID starting with ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0 not found: ID does not exist" containerID="ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.688620 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0"} err="failed to get container status \"ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0\": rpc error: code = NotFound desc = could not find container \"ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0\": container with ID starting with ee0aac5f010dbafa41ee0a798aec1530f1d421faf445ff1d42d742231c9cdee0 not found: ID does not exist" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.688650 4763 scope.go:117] "RemoveContainer" containerID="244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a" Dec 06 09:33:31 crc kubenswrapper[4763]: E1206 09:33:31.689007 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a\": container with ID starting with 244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a not found: ID does not exist" containerID="244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.689042 4763 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a"} err="failed to get container status \"244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a\": rpc error: code = NotFound desc = could not find container \"244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a\": container with ID starting with 244157abc671b1df6bd93c25d3cdbe842682b363b28bd7a0ac5c2a47f738af6a not found: ID does not exist" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.689063 4763 scope.go:117] "RemoveContainer" containerID="cf53bffbce3448db2c6b24821aa2c987365add058221bf9197fdff74c97e6017" Dec 06 09:33:31 crc kubenswrapper[4763]: E1206 09:33:31.689506 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf53bffbce3448db2c6b24821aa2c987365add058221bf9197fdff74c97e6017\": container with ID starting with cf53bffbce3448db2c6b24821aa2c987365add058221bf9197fdff74c97e6017 not found: ID does not exist" containerID="cf53bffbce3448db2c6b24821aa2c987365add058221bf9197fdff74c97e6017" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.689537 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf53bffbce3448db2c6b24821aa2c987365add058221bf9197fdff74c97e6017"} err="failed to get container status \"cf53bffbce3448db2c6b24821aa2c987365add058221bf9197fdff74c97e6017\": rpc error: code = NotFound desc = could not find container \"cf53bffbce3448db2c6b24821aa2c987365add058221bf9197fdff74c97e6017\": container with ID starting with cf53bffbce3448db2c6b24821aa2c987365add058221bf9197fdff74c97e6017 not found: ID does not exist" Dec 06 09:33:31 crc kubenswrapper[4763]: I1206 09:33:31.734334 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" path="/var/lib/kubelet/pods/a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8/volumes" Dec 06 09:35:12 crc kubenswrapper[4763]: I1206 09:35:12.537593 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:35:12 crc kubenswrapper[4763]: I1206 09:35:12.538221 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:35:42 crc kubenswrapper[4763]: I1206 09:35:42.536682 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:35:42 crc kubenswrapper[4763]: I1206 09:35:42.538077 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:35:54 crc kubenswrapper[4763]: I1206 
09:35:54.003452 4763 generic.go:334] "Generic (PLEG): container finished" podID="11d2c295-2754-410c-bda4-4830b20b5ee8" containerID="4986c74c789840e1eac796dadbd387656e0280867ab7df55561200eb2b1c4995" exitCode=1 Dec 06 09:35:54 crc kubenswrapper[4763]: I1206 09:35:54.003527 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"11d2c295-2754-410c-bda4-4830b20b5ee8","Type":"ContainerDied","Data":"4986c74c789840e1eac796dadbd387656e0280867ab7df55561200eb2b1c4995"} Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.396769 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.481718 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ca-certs\") pod \"11d2c295-2754-410c-bda4-4830b20b5ee8\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.482211 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-temporary\") pod \"11d2c295-2754-410c-bda4-4830b20b5ee8\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.482248 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config\") pod \"11d2c295-2754-410c-bda4-4830b20b5ee8\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.482272 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config-secret\") pod \"11d2c295-2754-410c-bda4-4830b20b5ee8\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.482310 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-config-data\") pod \"11d2c295-2754-410c-bda4-4830b20b5ee8\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.482353 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ssh-key\") pod \"11d2c295-2754-410c-bda4-4830b20b5ee8\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.482421 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"11d2c295-2754-410c-bda4-4830b20b5ee8\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.482818 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-workdir\") pod \"11d2c295-2754-410c-bda4-4830b20b5ee8\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") 
" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.482911 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9ppw\" (UniqueName: \"kubernetes.io/projected/11d2c295-2754-410c-bda4-4830b20b5ee8-kube-api-access-c9ppw\") pod \"11d2c295-2754-410c-bda4-4830b20b5ee8\" (UID: \"11d2c295-2754-410c-bda4-4830b20b5ee8\") " Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.483035 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "11d2c295-2754-410c-bda4-4830b20b5ee8" (UID: "11d2c295-2754-410c-bda4-4830b20b5ee8"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.483361 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-config-data" (OuterVolumeSpecName: "config-data") pod "11d2c295-2754-410c-bda4-4830b20b5ee8" (UID: "11d2c295-2754-410c-bda4-4830b20b5ee8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.483707 4763 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.483735 4763 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-config-data\") on node \"crc\" DevicePath \"\"" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.487972 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11d2c295-2754-410c-bda4-4830b20b5ee8-kube-api-access-c9ppw" (OuterVolumeSpecName: "kube-api-access-c9ppw") pod "11d2c295-2754-410c-bda4-4830b20b5ee8" (UID: "11d2c295-2754-410c-bda4-4830b20b5ee8"). InnerVolumeSpecName "kube-api-access-c9ppw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.501288 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "test-operator-logs") pod "11d2c295-2754-410c-bda4-4830b20b5ee8" (UID: "11d2c295-2754-410c-bda4-4830b20b5ee8"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.508074 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "11d2c295-2754-410c-bda4-4830b20b5ee8" (UID: "11d2c295-2754-410c-bda4-4830b20b5ee8"). InnerVolumeSpecName "test-operator-ephemeral-workdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.512145 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "11d2c295-2754-410c-bda4-4830b20b5ee8" (UID: "11d2c295-2754-410c-bda4-4830b20b5ee8"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.512246 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "11d2c295-2754-410c-bda4-4830b20b5ee8" (UID: "11d2c295-2754-410c-bda4-4830b20b5ee8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.514155 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "11d2c295-2754-410c-bda4-4830b20b5ee8" (UID: "11d2c295-2754-410c-bda4-4830b20b5ee8"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.548329 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "11d2c295-2754-410c-bda4-4830b20b5ee8" (UID: "11d2c295-2754-410c-bda4-4830b20b5ee8"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.585531 4763 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.585564 4763 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.585576 4763 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.585610 4763 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.585621 4763 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/11d2c295-2754-410c-bda4-4830b20b5ee8-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.585632 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9ppw\" (UniqueName: \"kubernetes.io/projected/11d2c295-2754-410c-bda4-4830b20b5ee8-kube-api-access-c9ppw\") on node \"crc\" DevicePath \"\"" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.585653 4763 reconciler_common.go:293] "Volume detached 
for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/11d2c295-2754-410c-bda4-4830b20b5ee8-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.608020 4763 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 06 09:35:55 crc kubenswrapper[4763]: I1206 09:35:55.687477 4763 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 06 09:35:56 crc kubenswrapper[4763]: I1206 09:35:56.022783 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"11d2c295-2754-410c-bda4-4830b20b5ee8","Type":"ContainerDied","Data":"5be96821446e609f13a3ae0368f52d2f4931920eedf06e160bd3098c9b541bf9"} Dec 06 09:35:56 crc kubenswrapper[4763]: I1206 09:35:56.022838 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 06 09:35:56 crc kubenswrapper[4763]: I1206 09:35:56.022842 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5be96821446e609f13a3ae0368f52d2f4931920eedf06e160bd3098c9b541bf9" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.787624 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 06 09:36:07 crc kubenswrapper[4763]: E1206 09:36:07.788610 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5217665e-c670-4736-b153-edf241c104fc" containerName="registry-server" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.788628 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="5217665e-c670-4736-b153-edf241c104fc" containerName="registry-server" Dec 06 09:36:07 crc kubenswrapper[4763]: E1206 09:36:07.788639 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" containerName="registry-server" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.788645 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" containerName="registry-server" Dec 06 09:36:07 crc kubenswrapper[4763]: E1206 09:36:07.788665 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" containerName="extract-content" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.788671 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" containerName="extract-content" Dec 06 09:36:07 crc kubenswrapper[4763]: E1206 09:36:07.788694 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11d2c295-2754-410c-bda4-4830b20b5ee8" containerName="tempest-tests-tempest-tests-runner" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.788700 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="11d2c295-2754-410c-bda4-4830b20b5ee8" containerName="tempest-tests-tempest-tests-runner" Dec 06 09:36:07 crc kubenswrapper[4763]: E1206 09:36:07.788714 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5217665e-c670-4736-b153-edf241c104fc" containerName="extract-utilities" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.788720 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="5217665e-c670-4736-b153-edf241c104fc" containerName="extract-utilities" Dec 06 09:36:07 crc 
kubenswrapper[4763]: E1206 09:36:07.788739 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" containerName="extract-utilities" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.788745 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" containerName="extract-utilities" Dec 06 09:36:07 crc kubenswrapper[4763]: E1206 09:36:07.788754 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5217665e-c670-4736-b153-edf241c104fc" containerName="extract-content" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.788760 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="5217665e-c670-4736-b153-edf241c104fc" containerName="extract-content" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.788962 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5e4df8e-b629-414e-9a88-ecdbbbd9c6c8" containerName="registry-server" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.788972 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="11d2c295-2754-410c-bda4-4830b20b5ee8" containerName="tempest-tests-tempest-tests-runner" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.789001 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="5217665e-c670-4736-b153-edf241c104fc" containerName="registry-server" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.789648 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.790723 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.822696 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-htlmh" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.925037 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"67d2c057-39b6-4cec-b2f5-cb62fccf3b72\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 06 09:36:07 crc kubenswrapper[4763]: I1206 09:36:07.925184 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dc2dj\" (UniqueName: \"kubernetes.io/projected/67d2c057-39b6-4cec-b2f5-cb62fccf3b72-kube-api-access-dc2dj\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"67d2c057-39b6-4cec-b2f5-cb62fccf3b72\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 06 09:36:08 crc kubenswrapper[4763]: I1206 09:36:08.027230 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dc2dj\" (UniqueName: \"kubernetes.io/projected/67d2c057-39b6-4cec-b2f5-cb62fccf3b72-kube-api-access-dc2dj\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"67d2c057-39b6-4cec-b2f5-cb62fccf3b72\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 06 09:36:08 crc kubenswrapper[4763]: I1206 09:36:08.027394 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"67d2c057-39b6-4cec-b2f5-cb62fccf3b72\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 06 09:36:08 crc kubenswrapper[4763]: I1206 09:36:08.027806 4763 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"67d2c057-39b6-4cec-b2f5-cb62fccf3b72\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 06 09:36:08 crc kubenswrapper[4763]: I1206 09:36:08.166269 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dc2dj\" (UniqueName: \"kubernetes.io/projected/67d2c057-39b6-4cec-b2f5-cb62fccf3b72-kube-api-access-dc2dj\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"67d2c057-39b6-4cec-b2f5-cb62fccf3b72\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 06 09:36:08 crc kubenswrapper[4763]: I1206 09:36:08.393027 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"67d2c057-39b6-4cec-b2f5-cb62fccf3b72\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 06 09:36:08 crc kubenswrapper[4763]: I1206 09:36:08.445064 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 06 09:36:08 crc kubenswrapper[4763]: I1206 09:36:08.956705 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 06 09:36:09 crc kubenswrapper[4763]: I1206 09:36:09.259513 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"67d2c057-39b6-4cec-b2f5-cb62fccf3b72","Type":"ContainerStarted","Data":"016b2744c42d04b21a6e3b299526452c7f1690ab99effd6d6c4dc8bff8a95133"} Dec 06 09:36:10 crc kubenswrapper[4763]: I1206 09:36:10.269286 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"67d2c057-39b6-4cec-b2f5-cb62fccf3b72","Type":"ContainerStarted","Data":"acc2bb6dbf3c3fe4ae9af3ad2b2b92e2b78a8145de96cf40ca58d64940d526ea"} Dec 06 09:36:10 crc kubenswrapper[4763]: I1206 09:36:10.288847 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.428827467 podStartE2EDuration="3.288824973s" podCreationTimestamp="2025-12-06 09:36:07 +0000 UTC" firstStartedPulling="2025-12-06 09:36:08.955842123 +0000 UTC m=+5051.531547161" lastFinishedPulling="2025-12-06 09:36:09.815839629 +0000 UTC m=+5052.391544667" observedRunningTime="2025-12-06 09:36:10.283271633 +0000 UTC m=+5052.858976671" watchObservedRunningTime="2025-12-06 09:36:10.288824973 +0000 UTC m=+5052.864530031" Dec 06 09:36:12 crc kubenswrapper[4763]: I1206 09:36:12.537276 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:36:12 crc kubenswrapper[4763]: I1206 09:36:12.537782 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:36:12 crc kubenswrapper[4763]: I1206 09:36:12.537870 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 09:36:12 crc kubenswrapper[4763]: I1206 09:36:12.539257 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 09:36:12 crc kubenswrapper[4763]: I1206 09:36:12.539391 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" gracePeriod=600 Dec 06 09:36:12 crc kubenswrapper[4763]: E1206 09:36:12.668198 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:36:13 crc kubenswrapper[4763]: I1206 09:36:13.297002 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" exitCode=0 Dec 06 09:36:13 crc kubenswrapper[4763]: I1206 09:36:13.297050 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13"} Dec 06 09:36:13 crc kubenswrapper[4763]: I1206 09:36:13.297082 4763 scope.go:117] "RemoveContainer" containerID="0833aaaf4268f1282ce4e762ac5e42bf0c3fc97bb43a56b1c3f20a1163f9b272" Dec 06 09:36:13 crc kubenswrapper[4763]: I1206 09:36:13.297916 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:36:13 crc kubenswrapper[4763]: E1206 09:36:13.298201 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:36:23 crc kubenswrapper[4763]: I1206 09:36:23.719444 4763 scope.go:117] "RemoveContainer" 
containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:36:23 crc kubenswrapper[4763]: E1206 09:36:23.720250 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:36:34 crc kubenswrapper[4763]: I1206 09:36:34.720225 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:36:34 crc kubenswrapper[4763]: E1206 09:36:34.720994 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.543866 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4mlfj/must-gather-t5s9m"] Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.546128 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4mlfj/must-gather-t5s9m" Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.556282 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-4mlfj"/"openshift-service-ca.crt" Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.556361 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-4mlfj"/"default-dockercfg-dwl7l" Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.556430 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-4mlfj"/"kube-root-ca.crt" Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.560629 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-4mlfj/must-gather-t5s9m"] Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.663804 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqpmt\" (UniqueName: \"kubernetes.io/projected/2af453e7-c07e-4145-89c9-f07fa56af62e-kube-api-access-fqpmt\") pod \"must-gather-t5s9m\" (UID: \"2af453e7-c07e-4145-89c9-f07fa56af62e\") " pod="openshift-must-gather-4mlfj/must-gather-t5s9m" Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.664076 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2af453e7-c07e-4145-89c9-f07fa56af62e-must-gather-output\") pod \"must-gather-t5s9m\" (UID: \"2af453e7-c07e-4145-89c9-f07fa56af62e\") " pod="openshift-must-gather-4mlfj/must-gather-t5s9m" Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.766123 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqpmt\" (UniqueName: \"kubernetes.io/projected/2af453e7-c07e-4145-89c9-f07fa56af62e-kube-api-access-fqpmt\") pod \"must-gather-t5s9m\" (UID: \"2af453e7-c07e-4145-89c9-f07fa56af62e\") " 
pod="openshift-must-gather-4mlfj/must-gather-t5s9m" Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.766217 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2af453e7-c07e-4145-89c9-f07fa56af62e-must-gather-output\") pod \"must-gather-t5s9m\" (UID: \"2af453e7-c07e-4145-89c9-f07fa56af62e\") " pod="openshift-must-gather-4mlfj/must-gather-t5s9m" Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.766756 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2af453e7-c07e-4145-89c9-f07fa56af62e-must-gather-output\") pod \"must-gather-t5s9m\" (UID: \"2af453e7-c07e-4145-89c9-f07fa56af62e\") " pod="openshift-must-gather-4mlfj/must-gather-t5s9m" Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.794587 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqpmt\" (UniqueName: \"kubernetes.io/projected/2af453e7-c07e-4145-89c9-f07fa56af62e-kube-api-access-fqpmt\") pod \"must-gather-t5s9m\" (UID: \"2af453e7-c07e-4145-89c9-f07fa56af62e\") " pod="openshift-must-gather-4mlfj/must-gather-t5s9m" Dec 06 09:36:42 crc kubenswrapper[4763]: I1206 09:36:42.894912 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4mlfj/must-gather-t5s9m" Dec 06 09:36:43 crc kubenswrapper[4763]: I1206 09:36:43.504654 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-4mlfj/must-gather-t5s9m"] Dec 06 09:36:43 crc kubenswrapper[4763]: I1206 09:36:43.596151 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/must-gather-t5s9m" event={"ID":"2af453e7-c07e-4145-89c9-f07fa56af62e","Type":"ContainerStarted","Data":"0765d24dacbd84915d2796eabd8abc0c9b88aabb77410deba6f2d1ce17fd24e9"} Dec 06 09:36:49 crc kubenswrapper[4763]: I1206 09:36:49.720635 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:36:49 crc kubenswrapper[4763]: E1206 09:36:49.721600 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:36:50 crc kubenswrapper[4763]: I1206 09:36:50.659189 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/must-gather-t5s9m" event={"ID":"2af453e7-c07e-4145-89c9-f07fa56af62e","Type":"ContainerStarted","Data":"e4374988fe703db3f03523a19663cf95e2d76164b0c8946e430ade21551e68d0"} Dec 06 09:36:51 crc kubenswrapper[4763]: I1206 09:36:51.670130 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/must-gather-t5s9m" event={"ID":"2af453e7-c07e-4145-89c9-f07fa56af62e","Type":"ContainerStarted","Data":"8e7bac947e49b3b07a457acb0a1f74f4ae9abb2b82614de5b25d6154d78f2139"} Dec 06 09:36:54 crc kubenswrapper[4763]: I1206 09:36:54.349296 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4mlfj/must-gather-t5s9m" podStartSLOduration=5.668586415 podStartE2EDuration="12.349264771s" podCreationTimestamp="2025-12-06 09:36:42 +0000 UTC" 
firstStartedPulling="2025-12-06 09:36:43.510840379 +0000 UTC m=+5086.086545417" lastFinishedPulling="2025-12-06 09:36:50.191518735 +0000 UTC m=+5092.767223773" observedRunningTime="2025-12-06 09:36:51.69475297 +0000 UTC m=+5094.270457998" watchObservedRunningTime="2025-12-06 09:36:54.349264771 +0000 UTC m=+5096.924969809" Dec 06 09:36:54 crc kubenswrapper[4763]: I1206 09:36:54.360310 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4mlfj/crc-debug-dv522"] Dec 06 09:36:54 crc kubenswrapper[4763]: I1206 09:36:54.363625 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-dv522" Dec 06 09:36:54 crc kubenswrapper[4763]: I1206 09:36:54.506078 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/09210b9e-254d-48db-9620-1ba3110017ed-host\") pod \"crc-debug-dv522\" (UID: \"09210b9e-254d-48db-9620-1ba3110017ed\") " pod="openshift-must-gather-4mlfj/crc-debug-dv522" Dec 06 09:36:54 crc kubenswrapper[4763]: I1206 09:36:54.506446 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4rhh\" (UniqueName: \"kubernetes.io/projected/09210b9e-254d-48db-9620-1ba3110017ed-kube-api-access-m4rhh\") pod \"crc-debug-dv522\" (UID: \"09210b9e-254d-48db-9620-1ba3110017ed\") " pod="openshift-must-gather-4mlfj/crc-debug-dv522" Dec 06 09:36:54 crc kubenswrapper[4763]: I1206 09:36:54.607998 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4rhh\" (UniqueName: \"kubernetes.io/projected/09210b9e-254d-48db-9620-1ba3110017ed-kube-api-access-m4rhh\") pod \"crc-debug-dv522\" (UID: \"09210b9e-254d-48db-9620-1ba3110017ed\") " pod="openshift-must-gather-4mlfj/crc-debug-dv522" Dec 06 09:36:54 crc kubenswrapper[4763]: I1206 09:36:54.608175 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/09210b9e-254d-48db-9620-1ba3110017ed-host\") pod \"crc-debug-dv522\" (UID: \"09210b9e-254d-48db-9620-1ba3110017ed\") " pod="openshift-must-gather-4mlfj/crc-debug-dv522" Dec 06 09:36:54 crc kubenswrapper[4763]: I1206 09:36:54.608253 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/09210b9e-254d-48db-9620-1ba3110017ed-host\") pod \"crc-debug-dv522\" (UID: \"09210b9e-254d-48db-9620-1ba3110017ed\") " pod="openshift-must-gather-4mlfj/crc-debug-dv522" Dec 06 09:36:54 crc kubenswrapper[4763]: I1206 09:36:54.645667 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4rhh\" (UniqueName: \"kubernetes.io/projected/09210b9e-254d-48db-9620-1ba3110017ed-kube-api-access-m4rhh\") pod \"crc-debug-dv522\" (UID: \"09210b9e-254d-48db-9620-1ba3110017ed\") " pod="openshift-must-gather-4mlfj/crc-debug-dv522" Dec 06 09:36:54 crc kubenswrapper[4763]: I1206 09:36:54.686111 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-dv522" Dec 06 09:36:54 crc kubenswrapper[4763]: W1206 09:36:54.718024 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09210b9e_254d_48db_9620_1ba3110017ed.slice/crio-6cfbea36eddf153474bd49add86a09c0338c5b49f3beff8c2173c90c5a829aeb WatchSource:0}: Error finding container 6cfbea36eddf153474bd49add86a09c0338c5b49f3beff8c2173c90c5a829aeb: Status 404 returned error can't find the container with id 6cfbea36eddf153474bd49add86a09c0338c5b49f3beff8c2173c90c5a829aeb Dec 06 09:36:55 crc kubenswrapper[4763]: I1206 09:36:55.705712 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/crc-debug-dv522" event={"ID":"09210b9e-254d-48db-9620-1ba3110017ed","Type":"ContainerStarted","Data":"6cfbea36eddf153474bd49add86a09c0338c5b49f3beff8c2173c90c5a829aeb"} Dec 06 09:37:00 crc kubenswrapper[4763]: I1206 09:37:00.719770 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:37:00 crc kubenswrapper[4763]: E1206 09:37:00.720659 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:37:06 crc kubenswrapper[4763]: I1206 09:37:06.828674 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/crc-debug-dv522" event={"ID":"09210b9e-254d-48db-9620-1ba3110017ed","Type":"ContainerStarted","Data":"f6e69646f5d14a3961b94d20ab80e68aff7b93c19b2225c123254244804fb4b5"} Dec 06 09:37:06 crc kubenswrapper[4763]: I1206 09:37:06.849140 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4mlfj/crc-debug-dv522" podStartSLOduration=1.178963435 podStartE2EDuration="12.849121847s" podCreationTimestamp="2025-12-06 09:36:54 +0000 UTC" firstStartedPulling="2025-12-06 09:36:54.721704396 +0000 UTC m=+5097.297409434" lastFinishedPulling="2025-12-06 09:37:06.391862808 +0000 UTC m=+5108.967567846" observedRunningTime="2025-12-06 09:37:06.844384929 +0000 UTC m=+5109.420089967" watchObservedRunningTime="2025-12-06 09:37:06.849121847 +0000 UTC m=+5109.424826885" Dec 06 09:37:15 crc kubenswrapper[4763]: I1206 09:37:15.722291 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:37:15 crc kubenswrapper[4763]: E1206 09:37:15.723388 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:37:27 crc kubenswrapper[4763]: I1206 09:37:27.727065 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:37:27 crc kubenswrapper[4763]: E1206 09:37:27.727878 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:37:40 crc kubenswrapper[4763]: I1206 09:37:40.720625 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:37:40 crc kubenswrapper[4763]: E1206 09:37:40.721475 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:37:51 crc kubenswrapper[4763]: I1206 09:37:51.719871 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:37:51 crc kubenswrapper[4763]: E1206 09:37:51.720971 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:37:55 crc kubenswrapper[4763]: I1206 09:37:55.259706 4763 generic.go:334] "Generic (PLEG): container finished" podID="09210b9e-254d-48db-9620-1ba3110017ed" containerID="f6e69646f5d14a3961b94d20ab80e68aff7b93c19b2225c123254244804fb4b5" exitCode=0 Dec 06 09:37:55 crc kubenswrapper[4763]: I1206 09:37:55.260227 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/crc-debug-dv522" event={"ID":"09210b9e-254d-48db-9620-1ba3110017ed","Type":"ContainerDied","Data":"f6e69646f5d14a3961b94d20ab80e68aff7b93c19b2225c123254244804fb4b5"} Dec 06 09:37:56 crc kubenswrapper[4763]: I1206 09:37:56.397571 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-dv522" Dec 06 09:37:56 crc kubenswrapper[4763]: I1206 09:37:56.437601 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4mlfj/crc-debug-dv522"] Dec 06 09:37:56 crc kubenswrapper[4763]: I1206 09:37:56.446553 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4mlfj/crc-debug-dv522"] Dec 06 09:37:56 crc kubenswrapper[4763]: I1206 09:37:56.531297 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4rhh\" (UniqueName: \"kubernetes.io/projected/09210b9e-254d-48db-9620-1ba3110017ed-kube-api-access-m4rhh\") pod \"09210b9e-254d-48db-9620-1ba3110017ed\" (UID: \"09210b9e-254d-48db-9620-1ba3110017ed\") " Dec 06 09:37:56 crc kubenswrapper[4763]: I1206 09:37:56.531378 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/09210b9e-254d-48db-9620-1ba3110017ed-host\") pod \"09210b9e-254d-48db-9620-1ba3110017ed\" (UID: \"09210b9e-254d-48db-9620-1ba3110017ed\") " Dec 06 09:37:56 crc kubenswrapper[4763]: I1206 09:37:56.531615 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/09210b9e-254d-48db-9620-1ba3110017ed-host" (OuterVolumeSpecName: "host") pod "09210b9e-254d-48db-9620-1ba3110017ed" (UID: "09210b9e-254d-48db-9620-1ba3110017ed"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 09:37:56 crc kubenswrapper[4763]: I1206 09:37:56.532082 4763 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/09210b9e-254d-48db-9620-1ba3110017ed-host\") on node \"crc\" DevicePath \"\"" Dec 06 09:37:56 crc kubenswrapper[4763]: I1206 09:37:56.545683 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09210b9e-254d-48db-9620-1ba3110017ed-kube-api-access-m4rhh" (OuterVolumeSpecName: "kube-api-access-m4rhh") pod "09210b9e-254d-48db-9620-1ba3110017ed" (UID: "09210b9e-254d-48db-9620-1ba3110017ed"). InnerVolumeSpecName "kube-api-access-m4rhh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:37:56 crc kubenswrapper[4763]: I1206 09:37:56.634274 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4rhh\" (UniqueName: \"kubernetes.io/projected/09210b9e-254d-48db-9620-1ba3110017ed-kube-api-access-m4rhh\") on node \"crc\" DevicePath \"\"" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.280233 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cfbea36eddf153474bd49add86a09c0338c5b49f3beff8c2173c90c5a829aeb" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.280301 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-dv522" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.597201 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4mlfj/crc-debug-qmm9t"] Dec 06 09:37:57 crc kubenswrapper[4763]: E1206 09:37:57.597624 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09210b9e-254d-48db-9620-1ba3110017ed" containerName="container-00" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.597641 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="09210b9e-254d-48db-9620-1ba3110017ed" containerName="container-00" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.597870 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="09210b9e-254d-48db-9620-1ba3110017ed" containerName="container-00" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.598582 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.652539 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-host\") pod \"crc-debug-qmm9t\" (UID: \"a7851d7f-3168-4cdd-affd-9ac931e8c8d2\") " pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.652635 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmsqb\" (UniqueName: \"kubernetes.io/projected/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-kube-api-access-xmsqb\") pod \"crc-debug-qmm9t\" (UID: \"a7851d7f-3168-4cdd-affd-9ac931e8c8d2\") " pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.734340 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09210b9e-254d-48db-9620-1ba3110017ed" path="/var/lib/kubelet/pods/09210b9e-254d-48db-9620-1ba3110017ed/volumes" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.754209 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-host\") pod \"crc-debug-qmm9t\" (UID: \"a7851d7f-3168-4cdd-affd-9ac931e8c8d2\") " pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.754275 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmsqb\" (UniqueName: \"kubernetes.io/projected/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-kube-api-access-xmsqb\") pod \"crc-debug-qmm9t\" (UID: \"a7851d7f-3168-4cdd-affd-9ac931e8c8d2\") " pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.754386 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-host\") pod \"crc-debug-qmm9t\" (UID: \"a7851d7f-3168-4cdd-affd-9ac931e8c8d2\") " pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.791529 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmsqb\" (UniqueName: \"kubernetes.io/projected/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-kube-api-access-xmsqb\") pod \"crc-debug-qmm9t\" (UID: \"a7851d7f-3168-4cdd-affd-9ac931e8c8d2\") " 
pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" Dec 06 09:37:57 crc kubenswrapper[4763]: I1206 09:37:57.927102 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" Dec 06 09:37:58 crc kubenswrapper[4763]: I1206 09:37:58.289335 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" event={"ID":"a7851d7f-3168-4cdd-affd-9ac931e8c8d2","Type":"ContainerStarted","Data":"47a0225bdce9bc37f0e160c77dfa8266d017867437900b9755cf9424e3e7ca58"} Dec 06 09:37:58 crc kubenswrapper[4763]: I1206 09:37:58.289654 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" event={"ID":"a7851d7f-3168-4cdd-affd-9ac931e8c8d2","Type":"ContainerStarted","Data":"1f3564ae97477d23d6c2c038d32e6859a6445c78737cd1e71b62465bce2da661"} Dec 06 09:37:58 crc kubenswrapper[4763]: I1206 09:37:58.303809 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" podStartSLOduration=1.3037856429999999 podStartE2EDuration="1.303785643s" podCreationTimestamp="2025-12-06 09:37:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-06 09:37:58.301522461 +0000 UTC m=+5160.877227509" watchObservedRunningTime="2025-12-06 09:37:58.303785643 +0000 UTC m=+5160.879490681" Dec 06 09:37:59 crc kubenswrapper[4763]: I1206 09:37:59.299823 4763 generic.go:334] "Generic (PLEG): container finished" podID="a7851d7f-3168-4cdd-affd-9ac931e8c8d2" containerID="47a0225bdce9bc37f0e160c77dfa8266d017867437900b9755cf9424e3e7ca58" exitCode=0 Dec 06 09:37:59 crc kubenswrapper[4763]: I1206 09:37:59.299968 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" event={"ID":"a7851d7f-3168-4cdd-affd-9ac931e8c8d2","Type":"ContainerDied","Data":"47a0225bdce9bc37f0e160c77dfa8266d017867437900b9755cf9424e3e7ca58"} Dec 06 09:38:00 crc kubenswrapper[4763]: I1206 09:38:00.907292 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" Dec 06 09:38:01 crc kubenswrapper[4763]: I1206 09:38:01.001967 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4mlfj/crc-debug-qmm9t"] Dec 06 09:38:01 crc kubenswrapper[4763]: I1206 09:38:01.014783 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-host\") pod \"a7851d7f-3168-4cdd-affd-9ac931e8c8d2\" (UID: \"a7851d7f-3168-4cdd-affd-9ac931e8c8d2\") " Dec 06 09:38:01 crc kubenswrapper[4763]: I1206 09:38:01.015166 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmsqb\" (UniqueName: \"kubernetes.io/projected/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-kube-api-access-xmsqb\") pod \"a7851d7f-3168-4cdd-affd-9ac931e8c8d2\" (UID: \"a7851d7f-3168-4cdd-affd-9ac931e8c8d2\") " Dec 06 09:38:01 crc kubenswrapper[4763]: I1206 09:38:01.017004 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-host" (OuterVolumeSpecName: "host") pod "a7851d7f-3168-4cdd-affd-9ac931e8c8d2" (UID: "a7851d7f-3168-4cdd-affd-9ac931e8c8d2"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 09:38:01 crc kubenswrapper[4763]: I1206 09:38:01.019920 4763 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-host\") on node \"crc\" DevicePath \"\"" Dec 06 09:38:01 crc kubenswrapper[4763]: I1206 09:38:01.020484 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4mlfj/crc-debug-qmm9t"] Dec 06 09:38:01 crc kubenswrapper[4763]: I1206 09:38:01.044165 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-kube-api-access-xmsqb" (OuterVolumeSpecName: "kube-api-access-xmsqb") pod "a7851d7f-3168-4cdd-affd-9ac931e8c8d2" (UID: "a7851d7f-3168-4cdd-affd-9ac931e8c8d2"). InnerVolumeSpecName "kube-api-access-xmsqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:38:01 crc kubenswrapper[4763]: I1206 09:38:01.121762 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmsqb\" (UniqueName: \"kubernetes.io/projected/a7851d7f-3168-4cdd-affd-9ac931e8c8d2-kube-api-access-xmsqb\") on node \"crc\" DevicePath \"\"" Dec 06 09:38:01 crc kubenswrapper[4763]: I1206 09:38:01.324784 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f3564ae97477d23d6c2c038d32e6859a6445c78737cd1e71b62465bce2da661" Dec 06 09:38:01 crc kubenswrapper[4763]: I1206 09:38:01.324864 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-qmm9t" Dec 06 09:38:01 crc kubenswrapper[4763]: I1206 09:38:01.730296 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7851d7f-3168-4cdd-affd-9ac931e8c8d2" path="/var/lib/kubelet/pods/a7851d7f-3168-4cdd-affd-9ac931e8c8d2/volumes" Dec 06 09:38:02 crc kubenswrapper[4763]: I1206 09:38:02.328302 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4mlfj/crc-debug-qcdbp"] Dec 06 09:38:02 crc kubenswrapper[4763]: E1206 09:38:02.329012 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7851d7f-3168-4cdd-affd-9ac931e8c8d2" containerName="container-00" Dec 06 09:38:02 crc kubenswrapper[4763]: I1206 09:38:02.329029 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7851d7f-3168-4cdd-affd-9ac931e8c8d2" containerName="container-00" Dec 06 09:38:02 crc kubenswrapper[4763]: I1206 09:38:02.329210 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7851d7f-3168-4cdd-affd-9ac931e8c8d2" containerName="container-00" Dec 06 09:38:02 crc kubenswrapper[4763]: I1206 09:38:02.329884 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" Dec 06 09:38:02 crc kubenswrapper[4763]: I1206 09:38:02.448229 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n29h\" (UniqueName: \"kubernetes.io/projected/f231dd33-80b1-421d-adf2-63b152b6266e-kube-api-access-6n29h\") pod \"crc-debug-qcdbp\" (UID: \"f231dd33-80b1-421d-adf2-63b152b6266e\") " pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" Dec 06 09:38:02 crc kubenswrapper[4763]: I1206 09:38:02.448337 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f231dd33-80b1-421d-adf2-63b152b6266e-host\") pod \"crc-debug-qcdbp\" (UID: \"f231dd33-80b1-421d-adf2-63b152b6266e\") " pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" Dec 06 09:38:02 crc kubenswrapper[4763]: I1206 09:38:02.550115 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f231dd33-80b1-421d-adf2-63b152b6266e-host\") pod \"crc-debug-qcdbp\" (UID: \"f231dd33-80b1-421d-adf2-63b152b6266e\") " pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" Dec 06 09:38:02 crc kubenswrapper[4763]: I1206 09:38:02.550229 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f231dd33-80b1-421d-adf2-63b152b6266e-host\") pod \"crc-debug-qcdbp\" (UID: \"f231dd33-80b1-421d-adf2-63b152b6266e\") " pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" Dec 06 09:38:02 crc kubenswrapper[4763]: I1206 09:38:02.550289 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n29h\" (UniqueName: \"kubernetes.io/projected/f231dd33-80b1-421d-adf2-63b152b6266e-kube-api-access-6n29h\") pod \"crc-debug-qcdbp\" (UID: \"f231dd33-80b1-421d-adf2-63b152b6266e\") " pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" Dec 06 09:38:02 crc kubenswrapper[4763]: I1206 09:38:02.671029 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n29h\" (UniqueName: \"kubernetes.io/projected/f231dd33-80b1-421d-adf2-63b152b6266e-kube-api-access-6n29h\") pod \"crc-debug-qcdbp\" (UID: \"f231dd33-80b1-421d-adf2-63b152b6266e\") " pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" Dec 06 09:38:02 crc kubenswrapper[4763]: I1206 09:38:02.946414 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" Dec 06 09:38:02 crc kubenswrapper[4763]: W1206 09:38:02.982134 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf231dd33_80b1_421d_adf2_63b152b6266e.slice/crio-74fbf3dd2bdbf420cf385d3daef9fa00c9edcb0d15e8f3773fb290f0857e2de0 WatchSource:0}: Error finding container 74fbf3dd2bdbf420cf385d3daef9fa00c9edcb0d15e8f3773fb290f0857e2de0: Status 404 returned error can't find the container with id 74fbf3dd2bdbf420cf385d3daef9fa00c9edcb0d15e8f3773fb290f0857e2de0 Dec 06 09:38:03 crc kubenswrapper[4763]: I1206 09:38:03.343197 4763 generic.go:334] "Generic (PLEG): container finished" podID="f231dd33-80b1-421d-adf2-63b152b6266e" containerID="e821b838dd7a2c0bd84a6b1585e56fc61ba58a18c0cdfe88911d02e472599013" exitCode=0 Dec 06 09:38:03 crc kubenswrapper[4763]: I1206 09:38:03.343272 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" event={"ID":"f231dd33-80b1-421d-adf2-63b152b6266e","Type":"ContainerDied","Data":"e821b838dd7a2c0bd84a6b1585e56fc61ba58a18c0cdfe88911d02e472599013"} Dec 06 09:38:03 crc kubenswrapper[4763]: I1206 09:38:03.343537 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" event={"ID":"f231dd33-80b1-421d-adf2-63b152b6266e","Type":"ContainerStarted","Data":"74fbf3dd2bdbf420cf385d3daef9fa00c9edcb0d15e8f3773fb290f0857e2de0"} Dec 06 09:38:03 crc kubenswrapper[4763]: I1206 09:38:03.385892 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4mlfj/crc-debug-qcdbp"] Dec 06 09:38:03 crc kubenswrapper[4763]: I1206 09:38:03.394891 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4mlfj/crc-debug-qcdbp"] Dec 06 09:38:04 crc kubenswrapper[4763]: I1206 09:38:04.477541 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" Dec 06 09:38:04 crc kubenswrapper[4763]: I1206 09:38:04.589397 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f231dd33-80b1-421d-adf2-63b152b6266e-host\") pod \"f231dd33-80b1-421d-adf2-63b152b6266e\" (UID: \"f231dd33-80b1-421d-adf2-63b152b6266e\") " Dec 06 09:38:04 crc kubenswrapper[4763]: I1206 09:38:04.589636 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6n29h\" (UniqueName: \"kubernetes.io/projected/f231dd33-80b1-421d-adf2-63b152b6266e-kube-api-access-6n29h\") pod \"f231dd33-80b1-421d-adf2-63b152b6266e\" (UID: \"f231dd33-80b1-421d-adf2-63b152b6266e\") " Dec 06 09:38:04 crc kubenswrapper[4763]: I1206 09:38:04.589801 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f231dd33-80b1-421d-adf2-63b152b6266e-host" (OuterVolumeSpecName: "host") pod "f231dd33-80b1-421d-adf2-63b152b6266e" (UID: "f231dd33-80b1-421d-adf2-63b152b6266e"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 06 09:38:04 crc kubenswrapper[4763]: I1206 09:38:04.590545 4763 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f231dd33-80b1-421d-adf2-63b152b6266e-host\") on node \"crc\" DevicePath \"\"" Dec 06 09:38:04 crc kubenswrapper[4763]: I1206 09:38:04.596584 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f231dd33-80b1-421d-adf2-63b152b6266e-kube-api-access-6n29h" (OuterVolumeSpecName: "kube-api-access-6n29h") pod "f231dd33-80b1-421d-adf2-63b152b6266e" (UID: "f231dd33-80b1-421d-adf2-63b152b6266e"). InnerVolumeSpecName "kube-api-access-6n29h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:38:04 crc kubenswrapper[4763]: I1206 09:38:04.692363 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6n29h\" (UniqueName: \"kubernetes.io/projected/f231dd33-80b1-421d-adf2-63b152b6266e-kube-api-access-6n29h\") on node \"crc\" DevicePath \"\"" Dec 06 09:38:04 crc kubenswrapper[4763]: I1206 09:38:04.719104 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:38:04 crc kubenswrapper[4763]: E1206 09:38:04.719449 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:38:05 crc kubenswrapper[4763]: I1206 09:38:05.360940 4763 scope.go:117] "RemoveContainer" containerID="e821b838dd7a2c0bd84a6b1585e56fc61ba58a18c0cdfe88911d02e472599013" Dec 06 09:38:05 crc kubenswrapper[4763]: I1206 09:38:05.361009 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4mlfj/crc-debug-qcdbp" Dec 06 09:38:05 crc kubenswrapper[4763]: I1206 09:38:05.731746 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f231dd33-80b1-421d-adf2-63b152b6266e" path="/var/lib/kubelet/pods/f231dd33-80b1-421d-adf2-63b152b6266e/volumes" Dec 06 09:38:17 crc kubenswrapper[4763]: I1206 09:38:17.728450 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:38:17 crc kubenswrapper[4763]: E1206 09:38:17.729245 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:38:31 crc kubenswrapper[4763]: I1206 09:38:31.815087 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5fcff8587b-xnnm8_15432443-8cf1-463b-bc66-1995d774b839/barbican-api/0.log" Dec 06 09:38:31 crc kubenswrapper[4763]: I1206 09:38:31.971432 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5fcff8587b-xnnm8_15432443-8cf1-463b-bc66-1995d774b839/barbican-api-log/0.log" Dec 06 09:38:32 crc kubenswrapper[4763]: I1206 09:38:32.057468 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-8549689586-h89zw_73a66c39-800c-426e-a24b-a95a37280ebd/barbican-keystone-listener/0.log" Dec 06 09:38:32 crc kubenswrapper[4763]: I1206 09:38:32.165242 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-8549689586-h89zw_73a66c39-800c-426e-a24b-a95a37280ebd/barbican-keystone-listener-log/0.log" Dec 06 09:38:32 crc kubenswrapper[4763]: I1206 09:38:32.277498 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-c9b5f6d4f-l8lbt_5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6/barbican-worker-log/0.log" Dec 06 09:38:32 crc kubenswrapper[4763]: I1206 09:38:32.310687 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-c9b5f6d4f-l8lbt_5a9d8580-0b68-40d8-915c-1b5dd3ffa9d6/barbican-worker/0.log" Dec 06 09:38:32 crc kubenswrapper[4763]: I1206 09:38:32.542031 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-xpswr_b775bb6f-096f-4232-b395-664dce5d049b/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:32 crc kubenswrapper[4763]: I1206 09:38:32.704261 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3c6e79b1-2945-4c24-918e-9a955cfae046/ceilometer-central-agent/0.log" Dec 06 09:38:32 crc kubenswrapper[4763]: I1206 09:38:32.719214 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:38:32 crc kubenswrapper[4763]: E1206 09:38:32.719578 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" 
podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:38:32 crc kubenswrapper[4763]: I1206 09:38:32.788507 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3c6e79b1-2945-4c24-918e-9a955cfae046/sg-core/0.log" Dec 06 09:38:32 crc kubenswrapper[4763]: I1206 09:38:32.790739 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3c6e79b1-2945-4c24-918e-9a955cfae046/ceilometer-notification-agent/0.log" Dec 06 09:38:32 crc kubenswrapper[4763]: I1206 09:38:32.836886 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3c6e79b1-2945-4c24-918e-9a955cfae046/proxy-httpd/0.log" Dec 06 09:38:33 crc kubenswrapper[4763]: I1206 09:38:33.082960 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa/cinder-api-log/0.log" Dec 06 09:38:33 crc kubenswrapper[4763]: I1206 09:38:33.397498 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_acc25d06-1cc0-4a09-b0d2-5bb9e423f7fa/cinder-api/0.log" Dec 06 09:38:33 crc kubenswrapper[4763]: I1206 09:38:33.403278 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_34c058b8-cdf5-4041-8667-b39f337a908c/probe/0.log" Dec 06 09:38:33 crc kubenswrapper[4763]: I1206 09:38:33.562337 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_34c058b8-cdf5-4041-8667-b39f337a908c/cinder-backup/0.log" Dec 06 09:38:33 crc kubenswrapper[4763]: I1206 09:38:33.656995 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bf9c2023-e5d0-4ad4-975d-ef654ff41dfb/probe/0.log" Dec 06 09:38:33 crc kubenswrapper[4763]: I1206 09:38:33.675170 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bf9c2023-e5d0-4ad4-975d-ef654ff41dfb/cinder-scheduler/0.log" Dec 06 09:38:33 crc kubenswrapper[4763]: I1206 09:38:33.947072 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_75dd8383-5523-4f4f-ad1a-d59db9482fa3/probe/0.log" Dec 06 09:38:33 crc kubenswrapper[4763]: I1206 09:38:33.971362 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_75dd8383-5523-4f4f-ad1a-d59db9482fa3/cinder-volume/0.log" Dec 06 09:38:34 crc kubenswrapper[4763]: I1206 09:38:34.146748 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_edca2cd6-b0e5-4fe8-b53a-a23ab02a568f/cinder-volume/0.log" Dec 06 09:38:34 crc kubenswrapper[4763]: I1206 09:38:34.189376 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_edca2cd6-b0e5-4fe8-b53a-a23ab02a568f/probe/0.log" Dec 06 09:38:34 crc kubenswrapper[4763]: I1206 09:38:34.199050 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-526ks_6463dad3-6446-4186-9c4f-39264a7f8679/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:34 crc kubenswrapper[4763]: I1206 09:38:34.412777 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-4rsd7_aed517d7-adb8-4335-8184-6c55f27dd3b8/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:34 crc kubenswrapper[4763]: I1206 09:38:34.493544 4763 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-858bd97c49-lcnf9_1449acdf-6256-4a8c-8cb5-a4a4d1706d26/init/0.log" Dec 06 09:38:34 crc kubenswrapper[4763]: I1206 09:38:34.662743 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-858bd97c49-lcnf9_1449acdf-6256-4a8c-8cb5-a4a4d1706d26/init/0.log" Dec 06 09:38:34 crc kubenswrapper[4763]: I1206 09:38:34.758223 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-d4487_8506a201-e6ba-4f5f-b637-e0dccab9caea/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:34 crc kubenswrapper[4763]: I1206 09:38:34.828469 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-858bd97c49-lcnf9_1449acdf-6256-4a8c-8cb5-a4a4d1706d26/dnsmasq-dns/0.log" Dec 06 09:38:34 crc kubenswrapper[4763]: I1206 09:38:34.976450 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9/glance-log/0.log" Dec 06 09:38:34 crc kubenswrapper[4763]: I1206 09:38:34.990926 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_9dd3ef40-c4d9-4fba-ae96-0ed0a747edd9/glance-httpd/0.log" Dec 06 09:38:35 crc kubenswrapper[4763]: I1206 09:38:35.809917 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_a5315b7f-ca1a-48f1-92ad-30f8afcddf16/glance-httpd/0.log" Dec 06 09:38:35 crc kubenswrapper[4763]: I1206 09:38:35.896493 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_a5315b7f-ca1a-48f1-92ad-30f8afcddf16/glance-log/0.log" Dec 06 09:38:35 crc kubenswrapper[4763]: I1206 09:38:35.965327 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5b557d69b-qxvcs_dee918b0-2519-402f-881e-052ffd7df1c0/horizon/0.log" Dec 06 09:38:36 crc kubenswrapper[4763]: I1206 09:38:36.179492 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-8dk67_1fd29f51-61ee-4d62-a135-11d9fbc73a73/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:36 crc kubenswrapper[4763]: I1206 09:38:36.304210 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-v6h6h_96d557e1-eb4f-4e15-b77c-2d308ddadb17/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:36 crc kubenswrapper[4763]: I1206 09:38:36.511526 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29416861-n4vcz_9a670801-b117-4ea7-b37b-b28dd79aa1c1/keystone-cron/0.log" Dec 06 09:38:36 crc kubenswrapper[4763]: I1206 09:38:36.605586 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5b557d69b-qxvcs_dee918b0-2519-402f-881e-052ffd7df1c0/horizon-log/0.log" Dec 06 09:38:36 crc kubenswrapper[4763]: I1206 09:38:36.693912 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_5102f1c3-3d66-40c5-88d3-a1e4b38cfadb/kube-state-metrics/0.log" Dec 06 09:38:36 crc kubenswrapper[4763]: I1206 09:38:36.902601 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5fd795fc6-gh6s9_ba11fdba-f596-4394-af61-47b7923fc2a6/keystone-api/0.log" Dec 06 09:38:36 crc kubenswrapper[4763]: I1206 09:38:36.972389 4763 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-nw425_48ac6869-b493-4288-9837-9acc1cdc9a90/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:37 crc kubenswrapper[4763]: I1206 09:38:37.922398 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-798696db5c-57lrg_a6cebba0-2a89-4d1d-b35c-811676cd4459/neutron-httpd/0.log" Dec 06 09:38:38 crc kubenswrapper[4763]: I1206 09:38:38.040053 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-4hpmv_fc43b8ce-1630-43f7-975e-fde4062cfc62/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:38 crc kubenswrapper[4763]: I1206 09:38:38.116204 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-798696db5c-57lrg_a6cebba0-2a89-4d1d-b35c-811676cd4459/neutron-api/0.log" Dec 06 09:38:38 crc kubenswrapper[4763]: I1206 09:38:38.651172 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_ae8fb3b7-c521-443c-b9cc-6f821a32e8a1/nova-cell0-conductor-conductor/0.log" Dec 06 09:38:38 crc kubenswrapper[4763]: I1206 09:38:38.975837 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_69005ae1-9200-4160-8026-7f672b8c30cc/nova-cell1-conductor-conductor/0.log" Dec 06 09:38:39 crc kubenswrapper[4763]: I1206 09:38:39.309168 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_a8059381-d8b3-4ce5-9d33-3a973651b9b3/nova-cell1-novncproxy-novncproxy/0.log" Dec 06 09:38:39 crc kubenswrapper[4763]: I1206 09:38:39.360236 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_781aaaf4-b6c0-4c1e-be97-725cd631e120/nova-api-log/0.log" Dec 06 09:38:39 crc kubenswrapper[4763]: I1206 09:38:39.604331 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-nhr4v_d118117b-51ec-4b2f-ae42-61af6c35ba88/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:39 crc kubenswrapper[4763]: I1206 09:38:39.670068 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_db2b9f5c-c7d4-446f-a749-729c24b6ce44/nova-metadata-log/0.log" Dec 06 09:38:39 crc kubenswrapper[4763]: I1206 09:38:39.691949 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_781aaaf4-b6c0-4c1e-be97-725cd631e120/nova-api-api/0.log" Dec 06 09:38:40 crc kubenswrapper[4763]: I1206 09:38:40.159912 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_a81419ed-3356-4a60-8bfa-b1cb2cfb5080/nova-scheduler-scheduler/0.log" Dec 06 09:38:40 crc kubenswrapper[4763]: I1206 09:38:40.168360 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_00c834db-e265-44e2-9915-2be0931014a5/mysql-bootstrap/0.log" Dec 06 09:38:40 crc kubenswrapper[4763]: I1206 09:38:40.335113 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_00c834db-e265-44e2-9915-2be0931014a5/mysql-bootstrap/0.log" Dec 06 09:38:40 crc kubenswrapper[4763]: I1206 09:38:40.692692 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_00c834db-e265-44e2-9915-2be0931014a5/galera/0.log" Dec 06 09:38:40 crc kubenswrapper[4763]: I1206 09:38:40.818893 4763 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_de1b0280-c39f-4e3d-98b9-cdbb0085e6e1/mysql-bootstrap/0.log" Dec 06 09:38:41 crc kubenswrapper[4763]: I1206 09:38:41.053948 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_de1b0280-c39f-4e3d-98b9-cdbb0085e6e1/galera/0.log" Dec 06 09:38:41 crc kubenswrapper[4763]: I1206 09:38:41.073708 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_de1b0280-c39f-4e3d-98b9-cdbb0085e6e1/mysql-bootstrap/0.log" Dec 06 09:38:41 crc kubenswrapper[4763]: I1206 09:38:41.289248 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_1b185411-8ba8-4524-8f1d-e7f69f87dc05/openstackclient/0.log" Dec 06 09:38:41 crc kubenswrapper[4763]: I1206 09:38:41.316788 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-zwkh6_d935db21-d7d0-4f7a-8d65-121e5263c242/openstack-network-exporter/0.log" Dec 06 09:38:41 crc kubenswrapper[4763]: I1206 09:38:41.582243 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-mvnv6_abd10dfb-5dd9-4271-94aa-60b8fed4ba2b/ovn-controller/0.log" Dec 06 09:38:41 crc kubenswrapper[4763]: I1206 09:38:41.757491 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hqktq_3afc31ec-e08a-4564-afb9-dda5f891cb5c/ovsdb-server-init/0.log" Dec 06 09:38:41 crc kubenswrapper[4763]: I1206 09:38:41.892269 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_db2b9f5c-c7d4-446f-a749-729c24b6ce44/nova-metadata-metadata/0.log" Dec 06 09:38:42 crc kubenswrapper[4763]: I1206 09:38:42.009113 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hqktq_3afc31ec-e08a-4564-afb9-dda5f891cb5c/ovsdb-server/0.log" Dec 06 09:38:42 crc kubenswrapper[4763]: I1206 09:38:42.041430 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hqktq_3afc31ec-e08a-4564-afb9-dda5f891cb5c/ovsdb-server-init/0.log" Dec 06 09:38:42 crc kubenswrapper[4763]: I1206 09:38:42.377879 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-t5z59_a4b3e774-f3e9-44a0-84aa-730f5a6ae8ee/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:42 crc kubenswrapper[4763]: I1206 09:38:42.468609 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e80cac8a-fa8e-4e8d-bc78-d3962d6921dc/openstack-network-exporter/0.log" Dec 06 09:38:42 crc kubenswrapper[4763]: I1206 09:38:42.525624 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hqktq_3afc31ec-e08a-4564-afb9-dda5f891cb5c/ovs-vswitchd/0.log" Dec 06 09:38:42 crc kubenswrapper[4763]: I1206 09:38:42.678206 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e80cac8a-fa8e-4e8d-bc78-d3962d6921dc/ovn-northd/0.log" Dec 06 09:38:42 crc kubenswrapper[4763]: I1206 09:38:42.791733 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_678168e1-cecc-486d-b2eb-366c90a302c5/ovsdbserver-nb/0.log" Dec 06 09:38:42 crc kubenswrapper[4763]: I1206 09:38:42.844123 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_678168e1-cecc-486d-b2eb-366c90a302c5/openstack-network-exporter/0.log" Dec 06 09:38:43 crc kubenswrapper[4763]: I1206 09:38:43.046326 4763 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-0_23914339-150e-409f-bd6d-7a1c91529a22/ovsdbserver-sb/0.log" Dec 06 09:38:43 crc kubenswrapper[4763]: I1206 09:38:43.048676 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_23914339-150e-409f-bd6d-7a1c91529a22/openstack-network-exporter/0.log" Dec 06 09:38:43 crc kubenswrapper[4763]: I1206 09:38:43.410642 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59bf5cd876-p79rt_e3b30be5-5ea4-4c91-a21f-f5d2c48670e7/placement-api/0.log" Dec 06 09:38:43 crc kubenswrapper[4763]: I1206 09:38:43.504611 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_08ba2fdd-9289-44be-a218-38d34272f2b4/init-config-reloader/0.log" Dec 06 09:38:43 crc kubenswrapper[4763]: I1206 09:38:43.543816 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59bf5cd876-p79rt_e3b30be5-5ea4-4c91-a21f-f5d2c48670e7/placement-log/0.log" Dec 06 09:38:43 crc kubenswrapper[4763]: I1206 09:38:43.672074 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_08ba2fdd-9289-44be-a218-38d34272f2b4/config-reloader/0.log" Dec 06 09:38:43 crc kubenswrapper[4763]: I1206 09:38:43.687748 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_08ba2fdd-9289-44be-a218-38d34272f2b4/init-config-reloader/0.log" Dec 06 09:38:43 crc kubenswrapper[4763]: I1206 09:38:43.693371 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_08ba2fdd-9289-44be-a218-38d34272f2b4/prometheus/0.log" Dec 06 09:38:43 crc kubenswrapper[4763]: I1206 09:38:43.804158 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_08ba2fdd-9289-44be-a218-38d34272f2b4/thanos-sidecar/0.log" Dec 06 09:38:43 crc kubenswrapper[4763]: I1206 09:38:43.968818 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9acf66e1-d7d4-4ffb-afc9-c5b82328d606/setup-container/0.log" Dec 06 09:38:44 crc kubenswrapper[4763]: I1206 09:38:44.216777 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9acf66e1-d7d4-4ffb-afc9-c5b82328d606/rabbitmq/0.log" Dec 06 09:38:44 crc kubenswrapper[4763]: I1206 09:38:44.223028 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9acf66e1-d7d4-4ffb-afc9-c5b82328d606/setup-container/0.log" Dec 06 09:38:44 crc kubenswrapper[4763]: I1206 09:38:44.241290 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_e18a4dfa-5953-422a-be11-7ae83ab5ec09/setup-container/0.log" Dec 06 09:38:44 crc kubenswrapper[4763]: I1206 09:38:44.449148 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_e18a4dfa-5953-422a-be11-7ae83ab5ec09/setup-container/0.log" Dec 06 09:38:44 crc kubenswrapper[4763]: I1206 09:38:44.464533 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_e18a4dfa-5953-422a-be11-7ae83ab5ec09/rabbitmq/0.log" Dec 06 09:38:44 crc kubenswrapper[4763]: I1206 09:38:44.526867 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_54a3c00e-d725-43d6-8afa-1a013c737071/setup-container/0.log" Dec 06 09:38:44 crc kubenswrapper[4763]: I1206 09:38:44.717484 4763 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_rabbitmq-server-0_54a3c00e-d725-43d6-8afa-1a013c737071/rabbitmq/0.log" Dec 06 09:38:44 crc kubenswrapper[4763]: I1206 09:38:44.829777 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-lpxgl_fff914be-5b3a-4696-93b9-4d384009f6b6/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:44 crc kubenswrapper[4763]: I1206 09:38:44.834454 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_54a3c00e-d725-43d6-8afa-1a013c737071/setup-container/0.log" Dec 06 09:38:45 crc kubenswrapper[4763]: I1206 09:38:45.017735 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-gdj8q_5e76d5c3-ab36-42b8-ab61-39e14274b162/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:45 crc kubenswrapper[4763]: I1206 09:38:45.051211 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-496t2_61414443-2847-4a81-8bbb-af167c4ff3c6/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:45 crc kubenswrapper[4763]: I1206 09:38:45.530996 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-pkzvh_67504944-9db6-4422-937a-70be47b9a514/ssh-known-hosts-edpm-deployment/0.log" Dec 06 09:38:45 crc kubenswrapper[4763]: I1206 09:38:45.573803 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-7v5wn_82b99854-bc09-408a-b477-30156ae38d45/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:45 crc kubenswrapper[4763]: I1206 09:38:45.861774 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-77bbc88767-6qptg_89364500-19a7-4b4f-aa5c-cf8730a63fdd/proxy-server/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.017425 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-crchz_e74906c2-6446-4cb0-a428-61609a969406/swift-ring-rebalance/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.076326 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-77bbc88767-6qptg_89364500-19a7-4b4f-aa5c-cf8730a63fdd/proxy-httpd/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.152728 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/account-auditor/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.217840 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/account-reaper/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.295754 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/account-replicator/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.452057 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/account-server/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.483555 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/container-auditor/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.484494 4763 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/container-server/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.502472 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/container-replicator/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.652461 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/container-updater/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.762091 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/object-replicator/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.797749 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/object-auditor/0.log" Dec 06 09:38:46 crc kubenswrapper[4763]: I1206 09:38:46.814229 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/object-expirer/0.log" Dec 06 09:38:47 crc kubenswrapper[4763]: I1206 09:38:47.447552 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/swift-recon-cron/0.log" Dec 06 09:38:47 crc kubenswrapper[4763]: I1206 09:38:47.480568 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/rsync/0.log" Dec 06 09:38:47 crc kubenswrapper[4763]: I1206 09:38:47.517544 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/object-server/0.log" Dec 06 09:38:47 crc kubenswrapper[4763]: I1206 09:38:47.519239 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_df22632a-c5cb-4636-abfe-48f60e1df901/object-updater/0.log" Dec 06 09:38:47 crc kubenswrapper[4763]: I1206 09:38:47.730339 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:38:47 crc kubenswrapper[4763]: E1206 09:38:47.730645 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:38:47 crc kubenswrapper[4763]: I1206 09:38:47.809330 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-h9b7z_b51e4fc2-25a8-4d25-bb8e-8cbaf9cf7e59/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:48 crc kubenswrapper[4763]: I1206 09:38:48.046741 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_67d2c057-39b6-4cec-b2f5-cb62fccf3b72/test-operator-logs-container/0.log" Dec 06 09:38:48 crc kubenswrapper[4763]: I1206 09:38:48.185538 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-z2j76_fecaf57c-7b83-41f2-a99c-001dd99c72d6/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 06 09:38:48 crc 
kubenswrapper[4763]: I1206 09:38:48.657630 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_11d2c295-2754-410c-bda4-4830b20b5ee8/tempest-tests-tempest-tests-runner/0.log" Dec 06 09:38:49 crc kubenswrapper[4763]: I1206 09:38:49.399607 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_95fb7538-cee1-4ee9-948e-648cf0070047/watcher-applier/0.log" Dec 06 09:38:49 crc kubenswrapper[4763]: I1206 09:38:49.537404 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_b699a6d2-a0ce-4be7-9173-524d485cbd89/memcached/0.log" Dec 06 09:38:49 crc kubenswrapper[4763]: I1206 09:38:49.661297 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_50364316-df96-4310-8365-1226050a1a58/watcher-api-log/0.log" Dec 06 09:38:51 crc kubenswrapper[4763]: I1206 09:38:51.882257 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_b2180bb1-fd83-4725-be6e-be8c0a976e5b/watcher-decision-engine/0.log" Dec 06 09:38:52 crc kubenswrapper[4763]: I1206 09:38:52.555252 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_50364316-df96-4310-8365-1226050a1a58/watcher-api/0.log" Dec 06 09:38:58 crc kubenswrapper[4763]: I1206 09:38:58.720047 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:38:58 crc kubenswrapper[4763]: E1206 09:38:58.721862 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:39:12 crc kubenswrapper[4763]: I1206 09:39:12.719505 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:39:12 crc kubenswrapper[4763]: E1206 09:39:12.721773 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:39:17 crc kubenswrapper[4763]: I1206 09:39:17.519630 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2_91194b81-a09e-491c-b66f-7d8d7628065d/util/0.log" Dec 06 09:39:17 crc kubenswrapper[4763]: I1206 09:39:17.725596 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2_91194b81-a09e-491c-b66f-7d8d7628065d/util/0.log" Dec 06 09:39:17 crc kubenswrapper[4763]: I1206 09:39:17.731476 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2_91194b81-a09e-491c-b66f-7d8d7628065d/pull/0.log" Dec 06 09:39:17 crc kubenswrapper[4763]: I1206 09:39:17.764839 4763 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2_91194b81-a09e-491c-b66f-7d8d7628065d/pull/0.log" Dec 06 09:39:17 crc kubenswrapper[4763]: I1206 09:39:17.914018 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2_91194b81-a09e-491c-b66f-7d8d7628065d/pull/0.log" Dec 06 09:39:17 crc kubenswrapper[4763]: I1206 09:39:17.927750 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2_91194b81-a09e-491c-b66f-7d8d7628065d/util/0.log" Dec 06 09:39:17 crc kubenswrapper[4763]: I1206 09:39:17.936096 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4da8fd0f31efa7a038617bff710fdb35a0346fbe7ddbed6f4c0b4c9e4ek44t2_91194b81-a09e-491c-b66f-7d8d7628065d/extract/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.086729 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-s7cj4_ff46659d-6be0-4f7b-81a8-f8de0b6331ae/kube-rbac-proxy/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.164704 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-s7cj4_ff46659d-6be0-4f7b-81a8-f8de0b6331ae/manager/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.193815 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-t2vdt_bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5/kube-rbac-proxy/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.365727 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-t2vdt_bd0bd0c1-fede-4b5c-8b33-c49d7c54cbd5/manager/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.368225 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-ngktl_5674dbca-4697-4993-888b-680428fba7ba/kube-rbac-proxy/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.374940 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-ngktl_5674dbca-4697-4993-888b-680428fba7ba/manager/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.517619 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-h9ss4_cd89c4f2-cf50-4183-b364-d4886b5369a6/kube-rbac-proxy/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.619344 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-h9ss4_cd89c4f2-cf50-4183-b364-d4886b5369a6/manager/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.724625 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-jjs5h_88e79272-2e99-462a-b29c-b4d2a34ed95b/kube-rbac-proxy/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.792097 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-jjs5h_88e79272-2e99-462a-b29c-b4d2a34ed95b/manager/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.801375 
4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-dbttk_3d914ebd-1d7f-405f-aa0c-c8b254ec7196/kube-rbac-proxy/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.941220 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-dbttk_3d914ebd-1d7f-405f-aa0c-c8b254ec7196/manager/0.log" Dec 06 09:39:18 crc kubenswrapper[4763]: I1206 09:39:18.996726 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-kj44v_3e707b64-79d0-4401-9401-a80ed24a9658/kube-rbac-proxy/0.log" Dec 06 09:39:19 crc kubenswrapper[4763]: I1206 09:39:19.242714 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-bvdzz_7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b/kube-rbac-proxy/0.log" Dec 06 09:39:19 crc kubenswrapper[4763]: I1206 09:39:19.249796 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-bvdzz_7fb871d6-b6b5-4b5f-9ac0-a1c5f528c80b/manager/0.log" Dec 06 09:39:19 crc kubenswrapper[4763]: I1206 09:39:19.317992 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-kj44v_3e707b64-79d0-4401-9401-a80ed24a9658/manager/0.log" Dec 06 09:39:19 crc kubenswrapper[4763]: I1206 09:39:19.405328 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-nrmpg_2e2c64e0-cee9-47bd-afca-2fadeeb61b01/kube-rbac-proxy/0.log" Dec 06 09:39:19 crc kubenswrapper[4763]: I1206 09:39:19.498459 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-nrmpg_2e2c64e0-cee9-47bd-afca-2fadeeb61b01/manager/0.log" Dec 06 09:39:19 crc kubenswrapper[4763]: I1206 09:39:19.573386 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-svncl_1cff2610-ab42-4f8d-8e4a-22218c0f30e0/kube-rbac-proxy/0.log" Dec 06 09:39:19 crc kubenswrapper[4763]: I1206 09:39:19.618186 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-svncl_1cff2610-ab42-4f8d-8e4a-22218c0f30e0/manager/0.log" Dec 06 09:39:19 crc kubenswrapper[4763]: I1206 09:39:19.707798 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-rb7dw_a6e1401e-85a2-4477-96d2-58acbc583139/kube-rbac-proxy/0.log" Dec 06 09:39:19 crc kubenswrapper[4763]: I1206 09:39:19.810258 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-rb7dw_a6e1401e-85a2-4477-96d2-58acbc583139/manager/0.log" Dec 06 09:39:19 crc kubenswrapper[4763]: I1206 09:39:19.866602 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-j4fds_3d7a6c13-0b20-44ac-afb5-6d67630877eb/kube-rbac-proxy/0.log" Dec 06 09:39:19 crc kubenswrapper[4763]: I1206 09:39:19.939545 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-j4fds_3d7a6c13-0b20-44ac-afb5-6d67630877eb/manager/0.log" Dec 06 09:39:20 crc 
kubenswrapper[4763]: I1206 09:39:20.021328 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-78qkv_ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83/kube-rbac-proxy/0.log" Dec 06 09:39:20 crc kubenswrapper[4763]: I1206 09:39:20.119278 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-78qkv_ce2bb7ad-0ccc-425c-b2a6-1718f8a5ac83/manager/0.log" Dec 06 09:39:20 crc kubenswrapper[4763]: I1206 09:39:20.179524 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-h47jh_e91554af-5d2b-4477-be5e-314a9b6e901d/manager/0.log" Dec 06 09:39:20 crc kubenswrapper[4763]: I1206 09:39:20.247709 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-h47jh_e91554af-5d2b-4477-be5e-314a9b6e901d/kube-rbac-proxy/0.log" Dec 06 09:39:20 crc kubenswrapper[4763]: I1206 09:39:20.334528 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp_f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c/kube-rbac-proxy/0.log" Dec 06 09:39:20 crc kubenswrapper[4763]: I1206 09:39:20.371147 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4c9wsp_f9e0900d-f8fd-4cf1-a38a-5b979ec35d5c/manager/0.log" Dec 06 09:39:20 crc kubenswrapper[4763]: I1206 09:39:20.789728 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-8fkl9_a5d6c3cb-d635-4b17-b52a-d20eb3286ac2/registry-server/0.log" Dec 06 09:39:20 crc kubenswrapper[4763]: I1206 09:39:20.857203 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-8557c89b5c-6tcs6_94e8ee83-090b-4636-9953-50d9bf39b2b7/operator/0.log" Dec 06 09:39:20 crc kubenswrapper[4763]: I1206 09:39:20.989084 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-dcrcf_c245a4f1-0cf3-4627-ad45-ce24db12fc93/kube-rbac-proxy/0.log" Dec 06 09:39:21 crc kubenswrapper[4763]: I1206 09:39:21.194134 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-dcrcf_c245a4f1-0cf3-4627-ad45-ce24db12fc93/manager/0.log" Dec 06 09:39:21 crc kubenswrapper[4763]: I1206 09:39:21.363771 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-fkptp_43a9b006-b703-46ad-a74b-f00752e25fdc/kube-rbac-proxy/0.log" Dec 06 09:39:21 crc kubenswrapper[4763]: I1206 09:39:21.455457 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-fkptp_43a9b006-b703-46ad-a74b-f00752e25fdc/manager/0.log" Dec 06 09:39:21 crc kubenswrapper[4763]: I1206 09:39:21.605628 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-dsc64_b9daf37b-2ddd-4324-98d5-ab782c45de9a/operator/0.log" Dec 06 09:39:21 crc kubenswrapper[4763]: I1206 09:39:21.677936 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-8jwdv_1aa32609-8006-42dd-94d3-0340547ed370/kube-rbac-proxy/0.log" Dec 
06 09:39:21 crc kubenswrapper[4763]: I1206 09:39:21.784675 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-8jwdv_1aa32609-8006-42dd-94d3-0340547ed370/manager/0.log" Dec 06 09:39:21 crc kubenswrapper[4763]: I1206 09:39:21.868591 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-vzhvh_e4139d53-17de-4e12-a43a-3f571154e203/kube-rbac-proxy/0.log" Dec 06 09:39:21 crc kubenswrapper[4763]: I1206 09:39:21.986884 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-5b68f46455-24xn4_c81486b2-5d29-4032-9db8-8f8266846f74/manager/0.log" Dec 06 09:39:22 crc kubenswrapper[4763]: I1206 09:39:22.079836 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-zhxph_94ab8de3-6887-460c-a3c0-d0cf4dcf4ead/kube-rbac-proxy/0.log" Dec 06 09:39:22 crc kubenswrapper[4763]: I1206 09:39:22.137256 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-vzhvh_e4139d53-17de-4e12-a43a-3f571154e203/manager/0.log" Dec 06 09:39:22 crc kubenswrapper[4763]: I1206 09:39:22.141446 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-zhxph_94ab8de3-6887-460c-a3c0-d0cf4dcf4ead/manager/0.log" Dec 06 09:39:22 crc kubenswrapper[4763]: I1206 09:39:22.274779 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7d48f48f5f-4rgh7_fc8f06be-5292-423d-bba6-e50068054197/kube-rbac-proxy/0.log" Dec 06 09:39:22 crc kubenswrapper[4763]: I1206 09:39:22.383763 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7d48f48f5f-4rgh7_fc8f06be-5292-423d-bba6-e50068054197/manager/0.log" Dec 06 09:39:25 crc kubenswrapper[4763]: I1206 09:39:25.719389 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:39:25 crc kubenswrapper[4763]: E1206 09:39:25.720195 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:39:37 crc kubenswrapper[4763]: I1206 09:39:37.727192 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:39:37 crc kubenswrapper[4763]: E1206 09:39:37.728375 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:39:41 crc kubenswrapper[4763]: I1206 09:39:41.920024 4763 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-7d7bl_e35ce166-8cb5-4419-b4db-09f13a65daf2/control-plane-machine-set-operator/0.log" Dec 06 09:39:42 crc kubenswrapper[4763]: I1206 09:39:42.116120 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7s62v_0d8b8c08-d283-4ca2-aed3-2fcb7637ac91/machine-api-operator/0.log" Dec 06 09:39:42 crc kubenswrapper[4763]: I1206 09:39:42.126194 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7s62v_0d8b8c08-d283-4ca2-aed3-2fcb7637ac91/kube-rbac-proxy/0.log" Dec 06 09:39:50 crc kubenswrapper[4763]: I1206 09:39:50.720113 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:39:50 crc kubenswrapper[4763]: E1206 09:39:50.721036 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:39:54 crc kubenswrapper[4763]: I1206 09:39:54.892338 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-g4fbj_ac2a45ed-d601-4f3c-8594-05810a6bbd89/cert-manager-controller/0.log" Dec 06 09:39:55 crc kubenswrapper[4763]: I1206 09:39:55.076364 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-kdpkl_85545589-5ded-4bd2-a7d8-7ff1a449b321/cert-manager-cainjector/0.log" Dec 06 09:39:55 crc kubenswrapper[4763]: I1206 09:39:55.115830 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-z4wp4_c96e8373-9751-402b-b2bd-d8fc061c18ec/cert-manager-webhook/0.log" Dec 06 09:40:01 crc kubenswrapper[4763]: I1206 09:40:01.719830 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:40:01 crc kubenswrapper[4763]: E1206 09:40:01.720557 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:40:09 crc kubenswrapper[4763]: I1206 09:40:09.677688 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-pxm57_f8a1df64-4620-4fb1-904a-487fed4df908/nmstate-console-plugin/0.log" Dec 06 09:40:09 crc kubenswrapper[4763]: I1206 09:40:09.905251 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-swq67_6dc27b5f-e3e1-4aab-842f-db79e092bf9a/nmstate-handler/0.log" Dec 06 09:40:09 crc kubenswrapper[4763]: I1206 09:40:09.922683 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-8p9dx_0f64e800-57db-4061-807d-90160767d69e/kube-rbac-proxy/0.log" Dec 06 09:40:10 crc kubenswrapper[4763]: I1206 09:40:10.008449 4763 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-8p9dx_0f64e800-57db-4061-807d-90160767d69e/nmstate-metrics/0.log" Dec 06 09:40:10 crc kubenswrapper[4763]: I1206 09:40:10.105008 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-krqlh_49419955-d174-44cb-ac59-84037352f94f/nmstate-operator/0.log" Dec 06 09:40:10 crc kubenswrapper[4763]: I1206 09:40:10.218541 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-pqqmw_4a2e50c2-eda2-4acc-b454-7b07d430954a/nmstate-webhook/0.log" Dec 06 09:40:15 crc kubenswrapper[4763]: I1206 09:40:15.735661 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:40:15 crc kubenswrapper[4763]: E1206 09:40:15.737858 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:40:24 crc kubenswrapper[4763]: I1206 09:40:24.174591 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-ttjcf_bba5d03f-2f3a-48e7-8c8a-dd5531a680b4/kube-rbac-proxy/0.log" Dec 06 09:40:24 crc kubenswrapper[4763]: I1206 09:40:24.257857 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-ttjcf_bba5d03f-2f3a-48e7-8c8a-dd5531a680b4/controller/0.log" Dec 06 09:40:24 crc kubenswrapper[4763]: I1206 09:40:24.397785 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-frr-files/0.log" Dec 06 09:40:24 crc kubenswrapper[4763]: I1206 09:40:24.582276 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-frr-files/0.log" Dec 06 09:40:24 crc kubenswrapper[4763]: I1206 09:40:24.586954 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-metrics/0.log" Dec 06 09:40:24 crc kubenswrapper[4763]: I1206 09:40:24.598270 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-reloader/0.log" Dec 06 09:40:24 crc kubenswrapper[4763]: I1206 09:40:24.629568 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-reloader/0.log" Dec 06 09:40:24 crc kubenswrapper[4763]: I1206 09:40:24.774355 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-frr-files/0.log" Dec 06 09:40:24 crc kubenswrapper[4763]: I1206 09:40:24.823245 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-reloader/0.log" Dec 06 09:40:24 crc kubenswrapper[4763]: I1206 09:40:24.833293 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-metrics/0.log" Dec 06 09:40:24 crc kubenswrapper[4763]: I1206 09:40:24.867621 4763 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-metrics/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.006294 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-frr-files/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.015035 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-reloader/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.017651 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/cp-metrics/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.068553 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/controller/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.191120 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/frr-metrics/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.206567 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/kube-rbac-proxy/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.271578 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/kube-rbac-proxy-frr/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.458592 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/reloader/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.506933 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-ppdsw_891a4eae-46dc-4ae1-bd31-d04889c9647e/frr-k8s-webhook-server/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.672328 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-c6d948bc6-6vpfc_0834bbd3-fa5d-4e25-9c42-0597716b8d60/manager/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.848805 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5c5f4f877c-n99dz_97adad42-5533-4a55-81ad-5b98cc51efb7/webhook-server/0.log" Dec 06 09:40:25 crc kubenswrapper[4763]: I1206 09:40:25.997524 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-mkf7x_0446ab0f-7545-4953-b283-4d8edab363f5/kube-rbac-proxy/0.log" Dec 06 09:40:26 crc kubenswrapper[4763]: I1206 09:40:26.583167 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-mkf7x_0446ab0f-7545-4953-b283-4d8edab363f5/speaker/0.log" Dec 06 09:40:26 crc kubenswrapper[4763]: I1206 09:40:26.811020 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4lrrp_3b9d7e71-435f-4f24-9686-436f44603eee/frr/0.log" Dec 06 09:40:27 crc kubenswrapper[4763]: I1206 09:40:27.726500 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:40:27 crc kubenswrapper[4763]: E1206 09:40:27.727043 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.089402 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fxhgj"] Dec 06 09:40:33 crc kubenswrapper[4763]: E1206 09:40:33.099079 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f231dd33-80b1-421d-adf2-63b152b6266e" containerName="container-00" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.099118 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f231dd33-80b1-421d-adf2-63b152b6266e" containerName="container-00" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.099356 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f231dd33-80b1-421d-adf2-63b152b6266e" containerName="container-00" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.101230 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.115381 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fxhgj"] Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.217976 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jrxz\" (UniqueName: \"kubernetes.io/projected/fdaace17-98a2-4524-bdc7-b66f32e9db41-kube-api-access-5jrxz\") pod \"community-operators-fxhgj\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.218096 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-catalog-content\") pod \"community-operators-fxhgj\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.218237 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-utilities\") pod \"community-operators-fxhgj\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.320349 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-catalog-content\") pod \"community-operators-fxhgj\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.320479 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-utilities\") pod \"community-operators-fxhgj\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.320523 4763 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jrxz\" (UniqueName: \"kubernetes.io/projected/fdaace17-98a2-4524-bdc7-b66f32e9db41-kube-api-access-5jrxz\") pod \"community-operators-fxhgj\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.320809 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-catalog-content\") pod \"community-operators-fxhgj\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.320999 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-utilities\") pod \"community-operators-fxhgj\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.341001 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jrxz\" (UniqueName: \"kubernetes.io/projected/fdaace17-98a2-4524-bdc7-b66f32e9db41-kube-api-access-5jrxz\") pod \"community-operators-fxhgj\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:33 crc kubenswrapper[4763]: I1206 09:40:33.434344 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:34 crc kubenswrapper[4763]: I1206 09:40:34.082854 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fxhgj"] Dec 06 09:40:35 crc kubenswrapper[4763]: I1206 09:40:35.003331 4763 generic.go:334] "Generic (PLEG): container finished" podID="fdaace17-98a2-4524-bdc7-b66f32e9db41" containerID="3e69fff4d6c9f47092264112c98065176be9c53730e12a66595b5ca8993a2bee" exitCode=0 Dec 06 09:40:35 crc kubenswrapper[4763]: I1206 09:40:35.003377 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxhgj" event={"ID":"fdaace17-98a2-4524-bdc7-b66f32e9db41","Type":"ContainerDied","Data":"3e69fff4d6c9f47092264112c98065176be9c53730e12a66595b5ca8993a2bee"} Dec 06 09:40:35 crc kubenswrapper[4763]: I1206 09:40:35.003403 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxhgj" event={"ID":"fdaace17-98a2-4524-bdc7-b66f32e9db41","Type":"ContainerStarted","Data":"860a03a29a03d55a999bad8eae9cf19e085ffa51a82d14f1fbf61c581d06982f"} Dec 06 09:40:35 crc kubenswrapper[4763]: I1206 09:40:35.007421 4763 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.017026 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxhgj" event={"ID":"fdaace17-98a2-4524-bdc7-b66f32e9db41","Type":"ContainerStarted","Data":"a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9"} Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.678410 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dx7r9"] Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.682116 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.722462 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dx7r9"] Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.801864 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sln4l\" (UniqueName: \"kubernetes.io/projected/f44e88ac-a936-4ad9-a919-05bc099aeb58-kube-api-access-sln4l\") pod \"redhat-operators-dx7r9\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.802163 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-utilities\") pod \"redhat-operators-dx7r9\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.802321 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-catalog-content\") pod \"redhat-operators-dx7r9\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.904256 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sln4l\" (UniqueName: \"kubernetes.io/projected/f44e88ac-a936-4ad9-a919-05bc099aeb58-kube-api-access-sln4l\") pod \"redhat-operators-dx7r9\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.904702 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-utilities\") pod \"redhat-operators-dx7r9\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.905006 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-catalog-content\") pod \"redhat-operators-dx7r9\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.905407 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-utilities\") pod \"redhat-operators-dx7r9\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.905581 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-catalog-content\") pod \"redhat-operators-dx7r9\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:36 crc kubenswrapper[4763]: I1206 09:40:36.930195 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-sln4l\" (UniqueName: \"kubernetes.io/projected/f44e88ac-a936-4ad9-a919-05bc099aeb58-kube-api-access-sln4l\") pod \"redhat-operators-dx7r9\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:37 crc kubenswrapper[4763]: I1206 09:40:37.015152 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:37 crc kubenswrapper[4763]: I1206 09:40:37.027221 4763 generic.go:334] "Generic (PLEG): container finished" podID="fdaace17-98a2-4524-bdc7-b66f32e9db41" containerID="a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9" exitCode=0 Dec 06 09:40:37 crc kubenswrapper[4763]: I1206 09:40:37.027270 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxhgj" event={"ID":"fdaace17-98a2-4524-bdc7-b66f32e9db41","Type":"ContainerDied","Data":"a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9"} Dec 06 09:40:37 crc kubenswrapper[4763]: I1206 09:40:37.569418 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dx7r9"] Dec 06 09:40:38 crc kubenswrapper[4763]: I1206 09:40:38.040606 4763 generic.go:334] "Generic (PLEG): container finished" podID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerID="16f2f0b192634a2b9e12f0a93a906b46f7062cc47d1ea64aecb09c1c857880bb" exitCode=0 Dec 06 09:40:38 crc kubenswrapper[4763]: I1206 09:40:38.040708 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dx7r9" event={"ID":"f44e88ac-a936-4ad9-a919-05bc099aeb58","Type":"ContainerDied","Data":"16f2f0b192634a2b9e12f0a93a906b46f7062cc47d1ea64aecb09c1c857880bb"} Dec 06 09:40:38 crc kubenswrapper[4763]: I1206 09:40:38.040951 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dx7r9" event={"ID":"f44e88ac-a936-4ad9-a919-05bc099aeb58","Type":"ContainerStarted","Data":"db6aa28c4b4b5d5d958028becfdb6062e609b6b6a6b2bc79b23d241fa7191eee"} Dec 06 09:40:38 crc kubenswrapper[4763]: I1206 09:40:38.043214 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxhgj" event={"ID":"fdaace17-98a2-4524-bdc7-b66f32e9db41","Type":"ContainerStarted","Data":"84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b"} Dec 06 09:40:38 crc kubenswrapper[4763]: I1206 09:40:38.092215 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fxhgj" podStartSLOduration=2.646021348 podStartE2EDuration="5.092193442s" podCreationTimestamp="2025-12-06 09:40:33 +0000 UTC" firstStartedPulling="2025-12-06 09:40:35.007197906 +0000 UTC m=+5317.582902944" lastFinishedPulling="2025-12-06 09:40:37.45337 +0000 UTC m=+5320.029075038" observedRunningTime="2025-12-06 09:40:38.089305854 +0000 UTC m=+5320.665010892" watchObservedRunningTime="2025-12-06 09:40:38.092193442 +0000 UTC m=+5320.667898480" Dec 06 09:40:39 crc kubenswrapper[4763]: I1206 09:40:39.497541 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8_4c96b09c-5d00-4c2d-bcfb-dee1f80943c0/util/0.log" Dec 06 09:40:39 crc kubenswrapper[4763]: I1206 09:40:39.673071 4763 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8_4c96b09c-5d00-4c2d-bcfb-dee1f80943c0/util/0.log" Dec 06 09:40:39 crc kubenswrapper[4763]: I1206 09:40:39.720804 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:40:39 crc kubenswrapper[4763]: E1206 09:40:39.722040 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:40:39 crc kubenswrapper[4763]: I1206 09:40:39.740043 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8_4c96b09c-5d00-4c2d-bcfb-dee1f80943c0/pull/0.log" Dec 06 09:40:39 crc kubenswrapper[4763]: I1206 09:40:39.752326 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8_4c96b09c-5d00-4c2d-bcfb-dee1f80943c0/pull/0.log" Dec 06 09:40:39 crc kubenswrapper[4763]: I1206 09:40:39.963336 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8_4c96b09c-5d00-4c2d-bcfb-dee1f80943c0/pull/0.log" Dec 06 09:40:40 crc kubenswrapper[4763]: I1206 09:40:40.007868 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8_4c96b09c-5d00-4c2d-bcfb-dee1f80943c0/util/0.log" Dec 06 09:40:40 crc kubenswrapper[4763]: I1206 09:40:40.066409 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dx7r9" event={"ID":"f44e88ac-a936-4ad9-a919-05bc099aeb58","Type":"ContainerStarted","Data":"df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e"} Dec 06 09:40:40 crc kubenswrapper[4763]: I1206 09:40:40.078357 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f9vnv8_4c96b09c-5d00-4c2d-bcfb-dee1f80943c0/extract/0.log" Dec 06 09:40:40 crc kubenswrapper[4763]: I1206 09:40:40.219103 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm_af427750-c93a-4698-bc6b-a73202bdfeb7/util/0.log" Dec 06 09:40:40 crc kubenswrapper[4763]: I1206 09:40:40.428572 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm_af427750-c93a-4698-bc6b-a73202bdfeb7/pull/0.log" Dec 06 09:40:40 crc kubenswrapper[4763]: I1206 09:40:40.455327 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm_af427750-c93a-4698-bc6b-a73202bdfeb7/pull/0.log" Dec 06 09:40:40 crc kubenswrapper[4763]: I1206 09:40:40.570732 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm_af427750-c93a-4698-bc6b-a73202bdfeb7/util/0.log" Dec 06 09:40:40 crc kubenswrapper[4763]: 
I1206 09:40:40.591643 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm_af427750-c93a-4698-bc6b-a73202bdfeb7/util/0.log" Dec 06 09:40:40 crc kubenswrapper[4763]: I1206 09:40:40.795742 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm_af427750-c93a-4698-bc6b-a73202bdfeb7/extract/0.log" Dec 06 09:40:40 crc kubenswrapper[4763]: I1206 09:40:40.860612 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104jdsm_af427750-c93a-4698-bc6b-a73202bdfeb7/pull/0.log" Dec 06 09:40:40 crc kubenswrapper[4763]: I1206 09:40:40.878146 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm_ffa08e65-01fc-4524-b474-2e1f7193fa69/util/0.log" Dec 06 09:40:41 crc kubenswrapper[4763]: I1206 09:40:41.064397 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm_ffa08e65-01fc-4524-b474-2e1f7193fa69/pull/0.log" Dec 06 09:40:41 crc kubenswrapper[4763]: I1206 09:40:41.119986 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm_ffa08e65-01fc-4524-b474-2e1f7193fa69/pull/0.log" Dec 06 09:40:41 crc kubenswrapper[4763]: I1206 09:40:41.229565 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm_ffa08e65-01fc-4524-b474-2e1f7193fa69/util/0.log" Dec 06 09:40:41 crc kubenswrapper[4763]: I1206 09:40:41.276781 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm_ffa08e65-01fc-4524-b474-2e1f7193fa69/util/0.log" Dec 06 09:40:41 crc kubenswrapper[4763]: I1206 09:40:41.309932 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm_ffa08e65-01fc-4524-b474-2e1f7193fa69/extract/0.log" Dec 06 09:40:41 crc kubenswrapper[4763]: I1206 09:40:41.512730 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kvt7x_31406d5a-2fb1-4c58-a333-8decda95ca2a/extract-utilities/0.log" Dec 06 09:40:41 crc kubenswrapper[4763]: I1206 09:40:41.587227 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gxhcm_ffa08e65-01fc-4524-b474-2e1f7193fa69/pull/0.log" Dec 06 09:40:41 crc kubenswrapper[4763]: I1206 09:40:41.712933 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kvt7x_31406d5a-2fb1-4c58-a333-8decda95ca2a/extract-content/0.log" Dec 06 09:40:41 crc kubenswrapper[4763]: I1206 09:40:41.761635 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kvt7x_31406d5a-2fb1-4c58-a333-8decda95ca2a/extract-content/0.log" Dec 06 09:40:41 crc kubenswrapper[4763]: I1206 09:40:41.774838 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kvt7x_31406d5a-2fb1-4c58-a333-8decda95ca2a/extract-utilities/0.log" Dec 06 09:40:41 crc 
kubenswrapper[4763]: I1206 09:40:41.952157 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kvt7x_31406d5a-2fb1-4c58-a333-8decda95ca2a/extract-utilities/0.log" Dec 06 09:40:41 crc kubenswrapper[4763]: I1206 09:40:41.987486 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-kvt7x_31406d5a-2fb1-4c58-a333-8decda95ca2a/extract-content/0.log" Dec 06 09:40:42 crc kubenswrapper[4763]: I1206 09:40:42.201605 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fxhgj_fdaace17-98a2-4524-bdc7-b66f32e9db41/extract-utilities/0.log" Dec 06 09:40:42 crc kubenswrapper[4763]: I1206 09:40:42.565535 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fxhgj_fdaace17-98a2-4524-bdc7-b66f32e9db41/extract-content/0.log" Dec 06 09:40:42 crc kubenswrapper[4763]: I1206 09:40:42.578649 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fxhgj_fdaace17-98a2-4524-bdc7-b66f32e9db41/extract-utilities/0.log" Dec 06 09:40:42 crc kubenswrapper[4763]: I1206 09:40:42.639048 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fxhgj_fdaace17-98a2-4524-bdc7-b66f32e9db41/extract-content/0.log" Dec 06 09:40:42 crc kubenswrapper[4763]: I1206 09:40:42.777196 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fxhgj_fdaace17-98a2-4524-bdc7-b66f32e9db41/extract-utilities/0.log" Dec 06 09:40:42 crc kubenswrapper[4763]: I1206 09:40:42.798572 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fxhgj_fdaace17-98a2-4524-bdc7-b66f32e9db41/extract-content/0.log" Dec 06 09:40:42 crc kubenswrapper[4763]: I1206 09:40:42.826055 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fxhgj_fdaace17-98a2-4524-bdc7-b66f32e9db41/registry-server/0.log" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.052343 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r5rgv_29279517-9ad6-4afc-9cfb-a895652124ed/extract-utilities/0.log" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.127407 4763 generic.go:334] "Generic (PLEG): container finished" podID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerID="df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e" exitCode=0 Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.127476 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dx7r9" event={"ID":"f44e88ac-a936-4ad9-a919-05bc099aeb58","Type":"ContainerDied","Data":"df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e"} Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.336666 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r5rgv_29279517-9ad6-4afc-9cfb-a895652124ed/extract-content/0.log" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.349823 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r5rgv_29279517-9ad6-4afc-9cfb-a895652124ed/extract-utilities/0.log" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.359758 4763 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-kvt7x_31406d5a-2fb1-4c58-a333-8decda95ca2a/registry-server/0.log" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.391559 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r5rgv_29279517-9ad6-4afc-9cfb-a895652124ed/extract-content/0.log" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.435388 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.437002 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.488927 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.528714 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r5rgv_29279517-9ad6-4afc-9cfb-a895652124ed/extract-content/0.log" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.563330 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r5rgv_29279517-9ad6-4afc-9cfb-a895652124ed/extract-utilities/0.log" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.752382 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg8kh_a6f5ed36-5b72-45e8-8aea-5715275f5f41/extract-utilities/0.log" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.945335 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg8kh_a6f5ed36-5b72-45e8-8aea-5715275f5f41/extract-content/0.log" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.967086 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg8kh_a6f5ed36-5b72-45e8-8aea-5715275f5f41/extract-utilities/0.log" Dec 06 09:40:43 crc kubenswrapper[4763]: I1206 09:40:43.988511 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg8kh_a6f5ed36-5b72-45e8-8aea-5715275f5f41/extract-content/0.log" Dec 06 09:40:44 crc kubenswrapper[4763]: I1206 09:40:44.000423 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-4fsn7_61203368-9fce-4808-ae90-b4a955f5f893/marketplace-operator/0.log" Dec 06 09:40:44 crc kubenswrapper[4763]: I1206 09:40:44.201374 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:44 crc kubenswrapper[4763]: I1206 09:40:44.208224 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg8kh_a6f5ed36-5b72-45e8-8aea-5715275f5f41/extract-content/0.log" Dec 06 09:40:44 crc kubenswrapper[4763]: I1206 09:40:44.248454 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg8kh_a6f5ed36-5b72-45e8-8aea-5715275f5f41/extract-utilities/0.log" Dec 06 09:40:44 crc kubenswrapper[4763]: I1206 09:40:44.449023 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dx7r9_f44e88ac-a936-4ad9-a919-05bc099aeb58/extract-utilities/0.log" Dec 06 09:40:44 crc kubenswrapper[4763]: I1206 09:40:44.585231 
4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg8kh_a6f5ed36-5b72-45e8-8aea-5715275f5f41/registry-server/0.log" Dec 06 09:40:44 crc kubenswrapper[4763]: I1206 09:40:44.755517 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dx7r9_f44e88ac-a936-4ad9-a919-05bc099aeb58/extract-content/0.log" Dec 06 09:40:44 crc kubenswrapper[4763]: I1206 09:40:44.787341 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dx7r9_f44e88ac-a936-4ad9-a919-05bc099aeb58/extract-utilities/0.log" Dec 06 09:40:44 crc kubenswrapper[4763]: I1206 09:40:44.801037 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dx7r9_f44e88ac-a936-4ad9-a919-05bc099aeb58/extract-content/0.log" Dec 06 09:40:44 crc kubenswrapper[4763]: I1206 09:40:44.977694 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r5rgv_29279517-9ad6-4afc-9cfb-a895652124ed/registry-server/0.log" Dec 06 09:40:45 crc kubenswrapper[4763]: I1206 09:40:45.044288 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dx7r9_f44e88ac-a936-4ad9-a919-05bc099aeb58/extract-content/0.log" Dec 06 09:40:45 crc kubenswrapper[4763]: I1206 09:40:45.118312 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dx7r9_f44e88ac-a936-4ad9-a919-05bc099aeb58/extract-utilities/0.log" Dec 06 09:40:45 crc kubenswrapper[4763]: I1206 09:40:45.152065 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dx7r9" event={"ID":"f44e88ac-a936-4ad9-a919-05bc099aeb58","Type":"ContainerStarted","Data":"54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc"} Dec 06 09:40:45 crc kubenswrapper[4763]: I1206 09:40:45.172647 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dx7r9" podStartSLOduration=3.187323317 podStartE2EDuration="9.172625312s" podCreationTimestamp="2025-12-06 09:40:36 +0000 UTC" firstStartedPulling="2025-12-06 09:40:38.042795145 +0000 UTC m=+5320.618500183" lastFinishedPulling="2025-12-06 09:40:44.02809714 +0000 UTC m=+5326.603802178" observedRunningTime="2025-12-06 09:40:45.170691049 +0000 UTC m=+5327.746396097" watchObservedRunningTime="2025-12-06 09:40:45.172625312 +0000 UTC m=+5327.748330350" Dec 06 09:40:45 crc kubenswrapper[4763]: I1206 09:40:45.200778 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6sjn_7dac1917-a2ac-4485-93ff-011dd58fcab7/extract-utilities/0.log" Dec 06 09:40:45 crc kubenswrapper[4763]: I1206 09:40:45.403549 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6sjn_7dac1917-a2ac-4485-93ff-011dd58fcab7/extract-content/0.log" Dec 06 09:40:45 crc kubenswrapper[4763]: I1206 09:40:45.437521 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6sjn_7dac1917-a2ac-4485-93ff-011dd58fcab7/extract-utilities/0.log" Dec 06 09:40:45 crc kubenswrapper[4763]: I1206 09:40:45.461003 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6sjn_7dac1917-a2ac-4485-93ff-011dd58fcab7/extract-content/0.log" Dec 06 09:40:45 crc kubenswrapper[4763]: I1206 09:40:45.656360 4763 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6sjn_7dac1917-a2ac-4485-93ff-011dd58fcab7/extract-utilities/0.log" Dec 06 09:40:45 crc kubenswrapper[4763]: I1206 09:40:45.679675 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6sjn_7dac1917-a2ac-4485-93ff-011dd58fcab7/extract-content/0.log" Dec 06 09:40:46 crc kubenswrapper[4763]: I1206 09:40:46.338034 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6sjn_7dac1917-a2ac-4485-93ff-011dd58fcab7/registry-server/0.log" Dec 06 09:40:47 crc kubenswrapper[4763]: I1206 09:40:47.015748 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:47 crc kubenswrapper[4763]: I1206 09:40:47.016151 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.061055 4763 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dx7r9" podUID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerName="registry-server" probeResult="failure" output=< Dec 06 09:40:48 crc kubenswrapper[4763]: timeout: failed to connect service ":50051" within 1s Dec 06 09:40:48 crc kubenswrapper[4763]: > Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.273826 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fxhgj"] Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.274094 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fxhgj" podUID="fdaace17-98a2-4524-bdc7-b66f32e9db41" containerName="registry-server" containerID="cri-o://84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b" gracePeriod=2 Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.752285 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.815420 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-catalog-content\") pod \"fdaace17-98a2-4524-bdc7-b66f32e9db41\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.815499 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jrxz\" (UniqueName: \"kubernetes.io/projected/fdaace17-98a2-4524-bdc7-b66f32e9db41-kube-api-access-5jrxz\") pod \"fdaace17-98a2-4524-bdc7-b66f32e9db41\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.815751 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-utilities\") pod \"fdaace17-98a2-4524-bdc7-b66f32e9db41\" (UID: \"fdaace17-98a2-4524-bdc7-b66f32e9db41\") " Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.816506 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-utilities" (OuterVolumeSpecName: "utilities") pod "fdaace17-98a2-4524-bdc7-b66f32e9db41" (UID: "fdaace17-98a2-4524-bdc7-b66f32e9db41"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.817966 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.824401 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdaace17-98a2-4524-bdc7-b66f32e9db41-kube-api-access-5jrxz" (OuterVolumeSpecName: "kube-api-access-5jrxz") pod "fdaace17-98a2-4524-bdc7-b66f32e9db41" (UID: "fdaace17-98a2-4524-bdc7-b66f32e9db41"). InnerVolumeSpecName "kube-api-access-5jrxz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.877813 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fdaace17-98a2-4524-bdc7-b66f32e9db41" (UID: "fdaace17-98a2-4524-bdc7-b66f32e9db41"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.919852 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdaace17-98a2-4524-bdc7-b66f32e9db41-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:40:48 crc kubenswrapper[4763]: I1206 09:40:48.919892 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jrxz\" (UniqueName: \"kubernetes.io/projected/fdaace17-98a2-4524-bdc7-b66f32e9db41-kube-api-access-5jrxz\") on node \"crc\" DevicePath \"\"" Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.200401 4763 generic.go:334] "Generic (PLEG): container finished" podID="fdaace17-98a2-4524-bdc7-b66f32e9db41" containerID="84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b" exitCode=0 Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.200455 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxhgj" event={"ID":"fdaace17-98a2-4524-bdc7-b66f32e9db41","Type":"ContainerDied","Data":"84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b"} Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.200488 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fxhgj" event={"ID":"fdaace17-98a2-4524-bdc7-b66f32e9db41","Type":"ContainerDied","Data":"860a03a29a03d55a999bad8eae9cf19e085ffa51a82d14f1fbf61c581d06982f"} Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.200511 4763 scope.go:117] "RemoveContainer" containerID="84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b" Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.200525 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fxhgj" Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.249994 4763 scope.go:117] "RemoveContainer" containerID="a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9" Dec 06 09:40:49 crc kubenswrapper[4763]: E1206 09:40:49.253268 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfdaace17_98a2_4524_bdc7_b66f32e9db41.slice/crio-860a03a29a03d55a999bad8eae9cf19e085ffa51a82d14f1fbf61c581d06982f\": RecentStats: unable to find data in memory cache]" Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.254337 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fxhgj"] Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.266568 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fxhgj"] Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.277747 4763 scope.go:117] "RemoveContainer" containerID="3e69fff4d6c9f47092264112c98065176be9c53730e12a66595b5ca8993a2bee" Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.333523 4763 scope.go:117] "RemoveContainer" containerID="84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b" Dec 06 09:40:49 crc kubenswrapper[4763]: E1206 09:40:49.334049 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b\": container with ID starting with 84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b not found: ID does not exist" containerID="84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b" Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.334104 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b"} err="failed to get container status \"84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b\": rpc error: code = NotFound desc = could not find container \"84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b\": container with ID starting with 84a8c339b83d20fb9cbe05fcf8453b8038b080a6f17029d7cdb62a9f70e2e28b not found: ID does not exist" Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.334126 4763 scope.go:117] "RemoveContainer" containerID="a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9" Dec 06 09:40:49 crc kubenswrapper[4763]: E1206 09:40:49.334450 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9\": container with ID starting with a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9 not found: ID does not exist" containerID="a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9" Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.334527 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9"} err="failed to get container status \"a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9\": rpc error: code = NotFound desc = could not find container \"a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9\": container with ID 
starting with a5e3af67b7f524e91c7af3be11dcae3c28823a60d2bbaacf906db988e8bca2b9 not found: ID does not exist" Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.334597 4763 scope.go:117] "RemoveContainer" containerID="3e69fff4d6c9f47092264112c98065176be9c53730e12a66595b5ca8993a2bee" Dec 06 09:40:49 crc kubenswrapper[4763]: E1206 09:40:49.334877 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e69fff4d6c9f47092264112c98065176be9c53730e12a66595b5ca8993a2bee\": container with ID starting with 3e69fff4d6c9f47092264112c98065176be9c53730e12a66595b5ca8993a2bee not found: ID does not exist" containerID="3e69fff4d6c9f47092264112c98065176be9c53730e12a66595b5ca8993a2bee" Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.334940 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e69fff4d6c9f47092264112c98065176be9c53730e12a66595b5ca8993a2bee"} err="failed to get container status \"3e69fff4d6c9f47092264112c98065176be9c53730e12a66595b5ca8993a2bee\": rpc error: code = NotFound desc = could not find container \"3e69fff4d6c9f47092264112c98065176be9c53730e12a66595b5ca8993a2bee\": container with ID starting with 3e69fff4d6c9f47092264112c98065176be9c53730e12a66595b5ca8993a2bee not found: ID does not exist" Dec 06 09:40:49 crc kubenswrapper[4763]: I1206 09:40:49.732629 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdaace17-98a2-4524-bdc7-b66f32e9db41" path="/var/lib/kubelet/pods/fdaace17-98a2-4524-bdc7-b66f32e9db41/volumes" Dec 06 09:40:50 crc kubenswrapper[4763]: I1206 09:40:50.719640 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:40:50 crc kubenswrapper[4763]: E1206 09:40:50.720236 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:40:57 crc kubenswrapper[4763]: I1206 09:40:57.061757 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:57 crc kubenswrapper[4763]: I1206 09:40:57.117557 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:57 crc kubenswrapper[4763]: I1206 09:40:57.299922 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dx7r9"] Dec 06 09:40:57 crc kubenswrapper[4763]: I1206 09:40:57.822115 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-8vpz2_e12a3a42-3cdb-490c-86f4-cf0bfbfdde37/prometheus-operator/0.log" Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.054711 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5fd6f9594d-6fcrf_fa21f3ec-f2cd-4c50-b2f6-831de68b3e61/prometheus-operator-admission-webhook/0.log" Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.073953 4763 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5fd6f9594d-pqcng_45272139-b882-4b1e-a1e9-1b570c6f74ec/prometheus-operator-admission-webhook/0.log" Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.254532 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-v6zq6_7cee436d-c942-4862-80b9-ba1633c94c45/operator/0.log" Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.279815 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dx7r9" podUID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerName="registry-server" containerID="cri-o://54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc" gracePeriod=2 Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.327289 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-mm44k_fffa7ba6-3524-4812-8c4c-14d616125be7/perses-operator/0.log" Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.797702 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.844501 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-utilities\") pod \"f44e88ac-a936-4ad9-a919-05bc099aeb58\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.844714 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sln4l\" (UniqueName: \"kubernetes.io/projected/f44e88ac-a936-4ad9-a919-05bc099aeb58-kube-api-access-sln4l\") pod \"f44e88ac-a936-4ad9-a919-05bc099aeb58\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.844806 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-catalog-content\") pod \"f44e88ac-a936-4ad9-a919-05bc099aeb58\" (UID: \"f44e88ac-a936-4ad9-a919-05bc099aeb58\") " Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.846248 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-utilities" (OuterVolumeSpecName: "utilities") pod "f44e88ac-a936-4ad9-a919-05bc099aeb58" (UID: "f44e88ac-a936-4ad9-a919-05bc099aeb58"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.851373 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f44e88ac-a936-4ad9-a919-05bc099aeb58-kube-api-access-sln4l" (OuterVolumeSpecName: "kube-api-access-sln4l") pod "f44e88ac-a936-4ad9-a919-05bc099aeb58" (UID: "f44e88ac-a936-4ad9-a919-05bc099aeb58"). InnerVolumeSpecName "kube-api-access-sln4l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.948885 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sln4l\" (UniqueName: \"kubernetes.io/projected/f44e88ac-a936-4ad9-a919-05bc099aeb58-kube-api-access-sln4l\") on node \"crc\" DevicePath \"\"" Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.948955 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:40:58 crc kubenswrapper[4763]: I1206 09:40:58.967470 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f44e88ac-a936-4ad9-a919-05bc099aeb58" (UID: "f44e88ac-a936-4ad9-a919-05bc099aeb58"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.051349 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f44e88ac-a936-4ad9-a919-05bc099aeb58-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.303148 4763 generic.go:334] "Generic (PLEG): container finished" podID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerID="54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc" exitCode=0 Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.303197 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dx7r9" event={"ID":"f44e88ac-a936-4ad9-a919-05bc099aeb58","Type":"ContainerDied","Data":"54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc"} Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.303227 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dx7r9" event={"ID":"f44e88ac-a936-4ad9-a919-05bc099aeb58","Type":"ContainerDied","Data":"db6aa28c4b4b5d5d958028becfdb6062e609b6b6a6b2bc79b23d241fa7191eee"} Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.303250 4763 scope.go:117] "RemoveContainer" containerID="54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.303411 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dx7r9" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.348243 4763 scope.go:117] "RemoveContainer" containerID="df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.374979 4763 scope.go:117] "RemoveContainer" containerID="16f2f0b192634a2b9e12f0a93a906b46f7062cc47d1ea64aecb09c1c857880bb" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.376202 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dx7r9"] Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.396606 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dx7r9"] Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.420724 4763 scope.go:117] "RemoveContainer" containerID="54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc" Dec 06 09:40:59 crc kubenswrapper[4763]: E1206 09:40:59.421584 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc\": container with ID starting with 54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc not found: ID does not exist" containerID="54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.421648 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc"} err="failed to get container status \"54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc\": rpc error: code = NotFound desc = could not find container \"54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc\": container with ID starting with 54c129e71027ef0aae260103ce26f6ba4fc8e24c7ea57600aba1798f0cff85fc not found: ID does not exist" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.421675 4763 scope.go:117] "RemoveContainer" containerID="df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e" Dec 06 09:40:59 crc kubenswrapper[4763]: E1206 09:40:59.422149 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e\": container with ID starting with df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e not found: ID does not exist" containerID="df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.422200 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e"} err="failed to get container status \"df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e\": rpc error: code = NotFound desc = could not find container \"df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e\": container with ID starting with df0fce230f12acc12f36de3e741acb56e8bf55ec99e7fba1043238247302ff0e not found: ID does not exist" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.422219 4763 scope.go:117] "RemoveContainer" containerID="16f2f0b192634a2b9e12f0a93a906b46f7062cc47d1ea64aecb09c1c857880bb" Dec 06 09:40:59 crc kubenswrapper[4763]: E1206 09:40:59.422561 4763 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"16f2f0b192634a2b9e12f0a93a906b46f7062cc47d1ea64aecb09c1c857880bb\": container with ID starting with 16f2f0b192634a2b9e12f0a93a906b46f7062cc47d1ea64aecb09c1c857880bb not found: ID does not exist" containerID="16f2f0b192634a2b9e12f0a93a906b46f7062cc47d1ea64aecb09c1c857880bb" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.422596 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16f2f0b192634a2b9e12f0a93a906b46f7062cc47d1ea64aecb09c1c857880bb"} err="failed to get container status \"16f2f0b192634a2b9e12f0a93a906b46f7062cc47d1ea64aecb09c1c857880bb\": rpc error: code = NotFound desc = could not find container \"16f2f0b192634a2b9e12f0a93a906b46f7062cc47d1ea64aecb09c1c857880bb\": container with ID starting with 16f2f0b192634a2b9e12f0a93a906b46f7062cc47d1ea64aecb09c1c857880bb not found: ID does not exist" Dec 06 09:40:59 crc kubenswrapper[4763]: E1206 09:40:59.550852 4763 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf44e88ac_a936_4ad9_a919_05bc099aeb58.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf44e88ac_a936_4ad9_a919_05bc099aeb58.slice/crio-db6aa28c4b4b5d5d958028becfdb6062e609b6b6a6b2bc79b23d241fa7191eee\": RecentStats: unable to find data in memory cache]" Dec 06 09:40:59 crc kubenswrapper[4763]: I1206 09:40:59.732565 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f44e88ac-a936-4ad9-a919-05bc099aeb58" path="/var/lib/kubelet/pods/f44e88ac-a936-4ad9-a919-05bc099aeb58/volumes" Dec 06 09:41:04 crc kubenswrapper[4763]: I1206 09:41:04.719591 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:41:04 crc kubenswrapper[4763]: E1206 09:41:04.720467 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:41:15 crc kubenswrapper[4763]: I1206 09:41:15.719298 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:41:16 crc kubenswrapper[4763]: I1206 09:41:16.498610 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"a37e740357b3afa0efe7f382aeab0e49d44f3d51cf8cee5d8cd16ba366adfefc"} Dec 06 09:41:17 crc kubenswrapper[4763]: E1206 09:41:17.712875 4763 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.18:46206->38.102.83.18:37247: read tcp 38.102.83.18:46206->38.102.83.18:37247: read: connection reset by peer Dec 06 09:42:52 crc kubenswrapper[4763]: I1206 09:42:52.426149 4763 generic.go:334] "Generic (PLEG): container finished" podID="2af453e7-c07e-4145-89c9-f07fa56af62e" containerID="e4374988fe703db3f03523a19663cf95e2d76164b0c8946e430ade21551e68d0" exitCode=0 Dec 06 09:42:52 crc kubenswrapper[4763]: I1206 09:42:52.426215 4763 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4mlfj/must-gather-t5s9m" event={"ID":"2af453e7-c07e-4145-89c9-f07fa56af62e","Type":"ContainerDied","Data":"e4374988fe703db3f03523a19663cf95e2d76164b0c8946e430ade21551e68d0"} Dec 06 09:42:52 crc kubenswrapper[4763]: I1206 09:42:52.427596 4763 scope.go:117] "RemoveContainer" containerID="e4374988fe703db3f03523a19663cf95e2d76164b0c8946e430ade21551e68d0" Dec 06 09:42:53 crc kubenswrapper[4763]: I1206 09:42:53.109511 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4mlfj_must-gather-t5s9m_2af453e7-c07e-4145-89c9-f07fa56af62e/gather/0.log" Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.142555 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4mlfj/must-gather-t5s9m"] Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.144534 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-4mlfj/must-gather-t5s9m" podUID="2af453e7-c07e-4145-89c9-f07fa56af62e" containerName="copy" containerID="cri-o://8e7bac947e49b3b07a457acb0a1f74f4ae9abb2b82614de5b25d6154d78f2139" gracePeriod=2 Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.155380 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4mlfj/must-gather-t5s9m"] Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.548925 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4mlfj_must-gather-t5s9m_2af453e7-c07e-4145-89c9-f07fa56af62e/copy/0.log" Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.549574 4763 generic.go:334] "Generic (PLEG): container finished" podID="2af453e7-c07e-4145-89c9-f07fa56af62e" containerID="8e7bac947e49b3b07a457acb0a1f74f4ae9abb2b82614de5b25d6154d78f2139" exitCode=143 Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.635461 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4mlfj_must-gather-t5s9m_2af453e7-c07e-4145-89c9-f07fa56af62e/copy/0.log" Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.636427 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4mlfj/must-gather-t5s9m" Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.732862 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2af453e7-c07e-4145-89c9-f07fa56af62e-must-gather-output\") pod \"2af453e7-c07e-4145-89c9-f07fa56af62e\" (UID: \"2af453e7-c07e-4145-89c9-f07fa56af62e\") " Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.732975 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqpmt\" (UniqueName: \"kubernetes.io/projected/2af453e7-c07e-4145-89c9-f07fa56af62e-kube-api-access-fqpmt\") pod \"2af453e7-c07e-4145-89c9-f07fa56af62e\" (UID: \"2af453e7-c07e-4145-89c9-f07fa56af62e\") " Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.740825 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2af453e7-c07e-4145-89c9-f07fa56af62e-kube-api-access-fqpmt" (OuterVolumeSpecName: "kube-api-access-fqpmt") pod "2af453e7-c07e-4145-89c9-f07fa56af62e" (UID: "2af453e7-c07e-4145-89c9-f07fa56af62e"). InnerVolumeSpecName "kube-api-access-fqpmt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.836192 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqpmt\" (UniqueName: \"kubernetes.io/projected/2af453e7-c07e-4145-89c9-f07fa56af62e-kube-api-access-fqpmt\") on node \"crc\" DevicePath \"\"" Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.914759 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2af453e7-c07e-4145-89c9-f07fa56af62e-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "2af453e7-c07e-4145-89c9-f07fa56af62e" (UID: "2af453e7-c07e-4145-89c9-f07fa56af62e"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:43:03 crc kubenswrapper[4763]: I1206 09:43:03.938219 4763 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2af453e7-c07e-4145-89c9-f07fa56af62e-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 06 09:43:04 crc kubenswrapper[4763]: I1206 09:43:04.571716 4763 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4mlfj_must-gather-t5s9m_2af453e7-c07e-4145-89c9-f07fa56af62e/copy/0.log" Dec 06 09:43:04 crc kubenswrapper[4763]: I1206 09:43:04.572231 4763 scope.go:117] "RemoveContainer" containerID="8e7bac947e49b3b07a457acb0a1f74f4ae9abb2b82614de5b25d6154d78f2139" Dec 06 09:43:04 crc kubenswrapper[4763]: I1206 09:43:04.572308 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4mlfj/must-gather-t5s9m" Dec 06 09:43:04 crc kubenswrapper[4763]: I1206 09:43:04.600540 4763 scope.go:117] "RemoveContainer" containerID="e4374988fe703db3f03523a19663cf95e2d76164b0c8946e430ade21551e68d0" Dec 06 09:43:05 crc kubenswrapper[4763]: I1206 09:43:05.730216 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2af453e7-c07e-4145-89c9-f07fa56af62e" path="/var/lib/kubelet/pods/2af453e7-c07e-4145-89c9-f07fa56af62e/volumes" Dec 06 09:43:25 crc kubenswrapper[4763]: I1206 09:43:25.266590 4763 scope.go:117] "RemoveContainer" containerID="f6e69646f5d14a3961b94d20ab80e68aff7b93c19b2225c123254244804fb4b5" Dec 06 09:43:42 crc kubenswrapper[4763]: I1206 09:43:42.537488 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:43:42 crc kubenswrapper[4763]: I1206 09:43:42.538198 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:44:12 crc kubenswrapper[4763]: I1206 09:44:12.540632 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:44:12 crc kubenswrapper[4763]: I1206 09:44:12.543114 4763 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:44:25 crc kubenswrapper[4763]: I1206 09:44:25.353369 4763 scope.go:117] "RemoveContainer" containerID="47a0225bdce9bc37f0e160c77dfa8266d017867437900b9755cf9424e3e7ca58" Dec 06 09:44:42 crc kubenswrapper[4763]: I1206 09:44:42.536680 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:44:42 crc kubenswrapper[4763]: I1206 09:44:42.537196 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:44:42 crc kubenswrapper[4763]: I1206 09:44:42.537242 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 09:44:42 crc kubenswrapper[4763]: I1206 09:44:42.538040 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a37e740357b3afa0efe7f382aeab0e49d44f3d51cf8cee5d8cd16ba366adfefc"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 09:44:42 crc kubenswrapper[4763]: I1206 09:44:42.538098 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://a37e740357b3afa0efe7f382aeab0e49d44f3d51cf8cee5d8cd16ba366adfefc" gracePeriod=600 Dec 06 09:44:43 crc kubenswrapper[4763]: I1206 09:44:43.514682 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="a37e740357b3afa0efe7f382aeab0e49d44f3d51cf8cee5d8cd16ba366adfefc" exitCode=0 Dec 06 09:44:43 crc kubenswrapper[4763]: I1206 09:44:43.514723 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"a37e740357b3afa0efe7f382aeab0e49d44f3d51cf8cee5d8cd16ba366adfefc"} Dec 06 09:44:43 crc kubenswrapper[4763]: I1206 09:44:43.515322 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerStarted","Data":"7ef09fa644d45c531ea335b8c2a65adb0688ca2d29c18a70709bbb84b9b533c2"} Dec 06 09:44:43 crc kubenswrapper[4763]: I1206 09:44:43.515346 4763 scope.go:117] "RemoveContainer" containerID="b3c0209dcf2469ccffd54b31aa2b2f1fdd33c4449a44322f1eab8163cbff7a13" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.272659 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tzdv9"] Dec 06 09:44:55 crc kubenswrapper[4763]: 
E1206 09:44:55.273647 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdaace17-98a2-4524-bdc7-b66f32e9db41" containerName="extract-utilities" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.273666 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdaace17-98a2-4524-bdc7-b66f32e9db41" containerName="extract-utilities" Dec 06 09:44:55 crc kubenswrapper[4763]: E1206 09:44:55.273698 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdaace17-98a2-4524-bdc7-b66f32e9db41" containerName="registry-server" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.273707 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdaace17-98a2-4524-bdc7-b66f32e9db41" containerName="registry-server" Dec 06 09:44:55 crc kubenswrapper[4763]: E1206 09:44:55.273726 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2af453e7-c07e-4145-89c9-f07fa56af62e" containerName="gather" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.273737 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="2af453e7-c07e-4145-89c9-f07fa56af62e" containerName="gather" Dec 06 09:44:55 crc kubenswrapper[4763]: E1206 09:44:55.273757 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerName="extract-content" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.273764 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerName="extract-content" Dec 06 09:44:55 crc kubenswrapper[4763]: E1206 09:44:55.273776 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdaace17-98a2-4524-bdc7-b66f32e9db41" containerName="extract-content" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.273783 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdaace17-98a2-4524-bdc7-b66f32e9db41" containerName="extract-content" Dec 06 09:44:55 crc kubenswrapper[4763]: E1206 09:44:55.273801 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2af453e7-c07e-4145-89c9-f07fa56af62e" containerName="copy" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.273808 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="2af453e7-c07e-4145-89c9-f07fa56af62e" containerName="copy" Dec 06 09:44:55 crc kubenswrapper[4763]: E1206 09:44:55.273815 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerName="extract-utilities" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.273822 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerName="extract-utilities" Dec 06 09:44:55 crc kubenswrapper[4763]: E1206 09:44:55.273839 4763 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerName="registry-server" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.273846 4763 state_mem.go:107] "Deleted CPUSet assignment" podUID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerName="registry-server" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.274130 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="2af453e7-c07e-4145-89c9-f07fa56af62e" containerName="gather" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.274151 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdaace17-98a2-4524-bdc7-b66f32e9db41" containerName="registry-server" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 
09:44:55.274172 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="2af453e7-c07e-4145-89c9-f07fa56af62e" containerName="copy" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.274181 4763 memory_manager.go:354] "RemoveStaleState removing state" podUID="f44e88ac-a936-4ad9-a919-05bc099aeb58" containerName="registry-server" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.276028 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.284205 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzdv9"] Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.433298 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xl5fr\" (UniqueName: \"kubernetes.io/projected/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-kube-api-access-xl5fr\") pod \"redhat-marketplace-tzdv9\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.433845 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-utilities\") pod \"redhat-marketplace-tzdv9\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.434045 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-catalog-content\") pod \"redhat-marketplace-tzdv9\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.535800 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-utilities\") pod \"redhat-marketplace-tzdv9\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.536001 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-catalog-content\") pod \"redhat-marketplace-tzdv9\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.536090 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xl5fr\" (UniqueName: \"kubernetes.io/projected/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-kube-api-access-xl5fr\") pod \"redhat-marketplace-tzdv9\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.536380 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-utilities\") pod \"redhat-marketplace-tzdv9\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:44:55 crc kubenswrapper[4763]: 
I1206 09:44:55.536512 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-catalog-content\") pod \"redhat-marketplace-tzdv9\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.566844 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xl5fr\" (UniqueName: \"kubernetes.io/projected/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-kube-api-access-xl5fr\") pod \"redhat-marketplace-tzdv9\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:44:55 crc kubenswrapper[4763]: I1206 09:44:55.605362 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:44:56 crc kubenswrapper[4763]: I1206 09:44:56.076415 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzdv9"] Dec 06 09:44:56 crc kubenswrapper[4763]: W1206 09:44:56.104333 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ce4a648_a851_4adf_85b6_ef84bdb1e89b.slice/crio-20a0c25eeb0c35d6f4be0c7bf1808ba937fbbe45ca1b706746c1e67d5e042dd4 WatchSource:0}: Error finding container 20a0c25eeb0c35d6f4be0c7bf1808ba937fbbe45ca1b706746c1e67d5e042dd4: Status 404 returned error can't find the container with id 20a0c25eeb0c35d6f4be0c7bf1808ba937fbbe45ca1b706746c1e67d5e042dd4 Dec 06 09:44:56 crc kubenswrapper[4763]: I1206 09:44:56.668574 4763 generic.go:334] "Generic (PLEG): container finished" podID="1ce4a648-a851-4adf-85b6-ef84bdb1e89b" containerID="b05283ebb466f0d1437523ad13364d895b7ac73bdbe6f2bc0f96c3de37cbde43" exitCode=0 Dec 06 09:44:56 crc kubenswrapper[4763]: I1206 09:44:56.668649 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzdv9" event={"ID":"1ce4a648-a851-4adf-85b6-ef84bdb1e89b","Type":"ContainerDied","Data":"b05283ebb466f0d1437523ad13364d895b7ac73bdbe6f2bc0f96c3de37cbde43"} Dec 06 09:44:56 crc kubenswrapper[4763]: I1206 09:44:56.668693 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzdv9" event={"ID":"1ce4a648-a851-4adf-85b6-ef84bdb1e89b","Type":"ContainerStarted","Data":"20a0c25eeb0c35d6f4be0c7bf1808ba937fbbe45ca1b706746c1e67d5e042dd4"} Dec 06 09:44:57 crc kubenswrapper[4763]: I1206 09:44:57.680009 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzdv9" event={"ID":"1ce4a648-a851-4adf-85b6-ef84bdb1e89b","Type":"ContainerStarted","Data":"152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be"} Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.273836 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xz5d8"] Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.277731 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.286336 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xz5d8"] Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.389967 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc8zh\" (UniqueName: \"kubernetes.io/projected/f54aae5f-0730-4468-b9b3-25db688428d4-kube-api-access-jc8zh\") pod \"certified-operators-xz5d8\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.390146 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-utilities\") pod \"certified-operators-xz5d8\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.390345 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-catalog-content\") pod \"certified-operators-xz5d8\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.492656 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-catalog-content\") pod \"certified-operators-xz5d8\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.492759 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc8zh\" (UniqueName: \"kubernetes.io/projected/f54aae5f-0730-4468-b9b3-25db688428d4-kube-api-access-jc8zh\") pod \"certified-operators-xz5d8\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.492893 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-utilities\") pod \"certified-operators-xz5d8\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.493478 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-catalog-content\") pod \"certified-operators-xz5d8\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.493634 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-utilities\") pod \"certified-operators-xz5d8\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.530086 4763 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jc8zh\" (UniqueName: \"kubernetes.io/projected/f54aae5f-0730-4468-b9b3-25db688428d4-kube-api-access-jc8zh\") pod \"certified-operators-xz5d8\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.598087 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.700868 4763 generic.go:334] "Generic (PLEG): container finished" podID="1ce4a648-a851-4adf-85b6-ef84bdb1e89b" containerID="152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be" exitCode=0 Dec 06 09:44:58 crc kubenswrapper[4763]: I1206 09:44:58.701121 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzdv9" event={"ID":"1ce4a648-a851-4adf-85b6-ef84bdb1e89b","Type":"ContainerDied","Data":"152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be"} Dec 06 09:44:59 crc kubenswrapper[4763]: I1206 09:44:59.203074 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xz5d8"] Dec 06 09:44:59 crc kubenswrapper[4763]: I1206 09:44:59.716029 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzdv9" event={"ID":"1ce4a648-a851-4adf-85b6-ef84bdb1e89b","Type":"ContainerStarted","Data":"d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4"} Dec 06 09:44:59 crc kubenswrapper[4763]: I1206 09:44:59.720717 4763 generic.go:334] "Generic (PLEG): container finished" podID="f54aae5f-0730-4468-b9b3-25db688428d4" containerID="318864a3630f2aaa20944d6fca5b2b63238dc9e68f92e1268725f6df91d0aaa1" exitCode=0 Dec 06 09:44:59 crc kubenswrapper[4763]: I1206 09:44:59.740503 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xz5d8" event={"ID":"f54aae5f-0730-4468-b9b3-25db688428d4","Type":"ContainerDied","Data":"318864a3630f2aaa20944d6fca5b2b63238dc9e68f92e1268725f6df91d0aaa1"} Dec 06 09:44:59 crc kubenswrapper[4763]: I1206 09:44:59.740552 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xz5d8" event={"ID":"f54aae5f-0730-4468-b9b3-25db688428d4","Type":"ContainerStarted","Data":"8e696d9155afbae19806c6eed0d281b4ae519d732d219c2f8414748ca98e2750"} Dec 06 09:44:59 crc kubenswrapper[4763]: I1206 09:44:59.745307 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tzdv9" podStartSLOduration=2.248025751 podStartE2EDuration="4.745289948s" podCreationTimestamp="2025-12-06 09:44:55 +0000 UTC" firstStartedPulling="2025-12-06 09:44:56.671340972 +0000 UTC m=+5579.247046050" lastFinishedPulling="2025-12-06 09:44:59.168605209 +0000 UTC m=+5581.744310247" observedRunningTime="2025-12-06 09:44:59.73834134 +0000 UTC m=+5582.314046418" watchObservedRunningTime="2025-12-06 09:44:59.745289948 +0000 UTC m=+5582.320994986" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.155923 4763 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l"] Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.157721 4763 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.167345 4763 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.167580 4763 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.180468 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l"] Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.234665 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c801ca85-0ebe-4fa6-a023-00f8fa940148-config-volume\") pod \"collect-profiles-29416905-pvw9l\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.234818 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c801ca85-0ebe-4fa6-a023-00f8fa940148-secret-volume\") pod \"collect-profiles-29416905-pvw9l\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.234890 4763 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97f95\" (UniqueName: \"kubernetes.io/projected/c801ca85-0ebe-4fa6-a023-00f8fa940148-kube-api-access-97f95\") pod \"collect-profiles-29416905-pvw9l\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.336472 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c801ca85-0ebe-4fa6-a023-00f8fa940148-config-volume\") pod \"collect-profiles-29416905-pvw9l\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.337008 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c801ca85-0ebe-4fa6-a023-00f8fa940148-secret-volume\") pod \"collect-profiles-29416905-pvw9l\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.337071 4763 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97f95\" (UniqueName: \"kubernetes.io/projected/c801ca85-0ebe-4fa6-a023-00f8fa940148-kube-api-access-97f95\") pod \"collect-profiles-29416905-pvw9l\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.337422 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c801ca85-0ebe-4fa6-a023-00f8fa940148-config-volume\") pod 
\"collect-profiles-29416905-pvw9l\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.354537 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c801ca85-0ebe-4fa6-a023-00f8fa940148-secret-volume\") pod \"collect-profiles-29416905-pvw9l\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.357015 4763 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97f95\" (UniqueName: \"kubernetes.io/projected/c801ca85-0ebe-4fa6-a023-00f8fa940148-kube-api-access-97f95\") pod \"collect-profiles-29416905-pvw9l\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.549748 4763 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:00 crc kubenswrapper[4763]: I1206 09:45:00.733760 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xz5d8" event={"ID":"f54aae5f-0730-4468-b9b3-25db688428d4","Type":"ContainerStarted","Data":"589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22"} Dec 06 09:45:01 crc kubenswrapper[4763]: I1206 09:45:01.039432 4763 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l"] Dec 06 09:45:01 crc kubenswrapper[4763]: W1206 09:45:01.040091 4763 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc801ca85_0ebe_4fa6_a023_00f8fa940148.slice/crio-fea6cf9840bb66505fc9b3b5356a5708585b49ea843e15af7627de0954c6fd66 WatchSource:0}: Error finding container fea6cf9840bb66505fc9b3b5356a5708585b49ea843e15af7627de0954c6fd66: Status 404 returned error can't find the container with id fea6cf9840bb66505fc9b3b5356a5708585b49ea843e15af7627de0954c6fd66 Dec 06 09:45:01 crc kubenswrapper[4763]: I1206 09:45:01.742092 4763 generic.go:334] "Generic (PLEG): container finished" podID="c801ca85-0ebe-4fa6-a023-00f8fa940148" containerID="0a66bd5c810bfc38279f2cf5af2cfbd8cd74157b6626f829897d9da7b6240cdc" exitCode=0 Dec 06 09:45:01 crc kubenswrapper[4763]: I1206 09:45:01.742267 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" event={"ID":"c801ca85-0ebe-4fa6-a023-00f8fa940148","Type":"ContainerDied","Data":"0a66bd5c810bfc38279f2cf5af2cfbd8cd74157b6626f829897d9da7b6240cdc"} Dec 06 09:45:01 crc kubenswrapper[4763]: I1206 09:45:01.742438 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" event={"ID":"c801ca85-0ebe-4fa6-a023-00f8fa940148","Type":"ContainerStarted","Data":"fea6cf9840bb66505fc9b3b5356a5708585b49ea843e15af7627de0954c6fd66"} Dec 06 09:45:01 crc kubenswrapper[4763]: I1206 09:45:01.745244 4763 generic.go:334] "Generic (PLEG): container finished" podID="f54aae5f-0730-4468-b9b3-25db688428d4" containerID="589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22" exitCode=0 Dec 06 09:45:01 crc kubenswrapper[4763]: I1206 09:45:01.745392 4763 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xz5d8" event={"ID":"f54aae5f-0730-4468-b9b3-25db688428d4","Type":"ContainerDied","Data":"589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22"} Dec 06 09:45:02 crc kubenswrapper[4763]: I1206 09:45:02.759056 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xz5d8" event={"ID":"f54aae5f-0730-4468-b9b3-25db688428d4","Type":"ContainerStarted","Data":"eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd"} Dec 06 09:45:02 crc kubenswrapper[4763]: I1206 09:45:02.794834 4763 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xz5d8" podStartSLOduration=2.338627639 podStartE2EDuration="4.794804004s" podCreationTimestamp="2025-12-06 09:44:58 +0000 UTC" firstStartedPulling="2025-12-06 09:44:59.723105959 +0000 UTC m=+5582.298811007" lastFinishedPulling="2025-12-06 09:45:02.179282334 +0000 UTC m=+5584.754987372" observedRunningTime="2025-12-06 09:45:02.776092338 +0000 UTC m=+5585.351797406" watchObservedRunningTime="2025-12-06 09:45:02.794804004 +0000 UTC m=+5585.370509062" Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.174273 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.305119 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c801ca85-0ebe-4fa6-a023-00f8fa940148-secret-volume\") pod \"c801ca85-0ebe-4fa6-a023-00f8fa940148\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.305165 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97f95\" (UniqueName: \"kubernetes.io/projected/c801ca85-0ebe-4fa6-a023-00f8fa940148-kube-api-access-97f95\") pod \"c801ca85-0ebe-4fa6-a023-00f8fa940148\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.305235 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c801ca85-0ebe-4fa6-a023-00f8fa940148-config-volume\") pod \"c801ca85-0ebe-4fa6-a023-00f8fa940148\" (UID: \"c801ca85-0ebe-4fa6-a023-00f8fa940148\") " Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.307007 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c801ca85-0ebe-4fa6-a023-00f8fa940148-config-volume" (OuterVolumeSpecName: "config-volume") pod "c801ca85-0ebe-4fa6-a023-00f8fa940148" (UID: "c801ca85-0ebe-4fa6-a023-00f8fa940148"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.311487 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c801ca85-0ebe-4fa6-a023-00f8fa940148-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c801ca85-0ebe-4fa6-a023-00f8fa940148" (UID: "c801ca85-0ebe-4fa6-a023-00f8fa940148"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.311652 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c801ca85-0ebe-4fa6-a023-00f8fa940148-kube-api-access-97f95" (OuterVolumeSpecName: "kube-api-access-97f95") pod "c801ca85-0ebe-4fa6-a023-00f8fa940148" (UID: "c801ca85-0ebe-4fa6-a023-00f8fa940148"). InnerVolumeSpecName "kube-api-access-97f95". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.408093 4763 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c801ca85-0ebe-4fa6-a023-00f8fa940148-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.408136 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97f95\" (UniqueName: \"kubernetes.io/projected/c801ca85-0ebe-4fa6-a023-00f8fa940148-kube-api-access-97f95\") on node \"crc\" DevicePath \"\"" Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.408147 4763 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c801ca85-0ebe-4fa6-a023-00f8fa940148-config-volume\") on node \"crc\" DevicePath \"\"" Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.774335 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" event={"ID":"c801ca85-0ebe-4fa6-a023-00f8fa940148","Type":"ContainerDied","Data":"fea6cf9840bb66505fc9b3b5356a5708585b49ea843e15af7627de0954c6fd66"} Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.774373 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416905-pvw9l" Dec 06 09:45:03 crc kubenswrapper[4763]: I1206 09:45:03.774380 4763 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fea6cf9840bb66505fc9b3b5356a5708585b49ea843e15af7627de0954c6fd66" Dec 06 09:45:04 crc kubenswrapper[4763]: I1206 09:45:04.255333 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9"] Dec 06 09:45:04 crc kubenswrapper[4763]: I1206 09:45:04.269010 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416860-hcfb9"] Dec 06 09:45:05 crc kubenswrapper[4763]: I1206 09:45:05.606380 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:45:05 crc kubenswrapper[4763]: I1206 09:45:05.606813 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:45:05 crc kubenswrapper[4763]: I1206 09:45:05.656053 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:45:06 crc kubenswrapper[4763]: I1206 09:45:06.081675 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e936afc1-13a4-430a-be88-db8d932387e1" path="/var/lib/kubelet/pods/e936afc1-13a4-430a-be88-db8d932387e1/volumes" Dec 06 09:45:06 crc kubenswrapper[4763]: I1206 09:45:06.141275 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:45:07 crc kubenswrapper[4763]: I1206 09:45:07.461333 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzdv9"] Dec 06 09:45:08 crc kubenswrapper[4763]: I1206 09:45:08.103937 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tzdv9" podUID="1ce4a648-a851-4adf-85b6-ef84bdb1e89b" containerName="registry-server" containerID="cri-o://d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4" gracePeriod=2 Dec 06 09:45:08 crc kubenswrapper[4763]: I1206 09:45:08.598778 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:45:08 crc kubenswrapper[4763]: I1206 09:45:08.598880 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:45:08 crc kubenswrapper[4763]: I1206 09:45:08.662687 4763 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:45:08 crc kubenswrapper[4763]: I1206 09:45:08.793628 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:45:08 crc kubenswrapper[4763]: I1206 09:45:08.954060 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xl5fr\" (UniqueName: \"kubernetes.io/projected/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-kube-api-access-xl5fr\") pod \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " Dec 06 09:45:08 crc kubenswrapper[4763]: I1206 09:45:08.954212 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-catalog-content\") pod \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " Dec 06 09:45:08 crc kubenswrapper[4763]: I1206 09:45:08.954324 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-utilities\") pod \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\" (UID: \"1ce4a648-a851-4adf-85b6-ef84bdb1e89b\") " Dec 06 09:45:08 crc kubenswrapper[4763]: I1206 09:45:08.955543 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-utilities" (OuterVolumeSpecName: "utilities") pod "1ce4a648-a851-4adf-85b6-ef84bdb1e89b" (UID: "1ce4a648-a851-4adf-85b6-ef84bdb1e89b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:45:08 crc kubenswrapper[4763]: I1206 09:45:08.962245 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-kube-api-access-xl5fr" (OuterVolumeSpecName: "kube-api-access-xl5fr") pod "1ce4a648-a851-4adf-85b6-ef84bdb1e89b" (UID: "1ce4a648-a851-4adf-85b6-ef84bdb1e89b"). InnerVolumeSpecName "kube-api-access-xl5fr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:45:08 crc kubenswrapper[4763]: I1206 09:45:08.988701 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1ce4a648-a851-4adf-85b6-ef84bdb1e89b" (UID: "1ce4a648-a851-4adf-85b6-ef84bdb1e89b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.056792 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.056846 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xl5fr\" (UniqueName: \"kubernetes.io/projected/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-kube-api-access-xl5fr\") on node \"crc\" DevicePath \"\"" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.056860 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce4a648-a851-4adf-85b6-ef84bdb1e89b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.119835 4763 generic.go:334] "Generic (PLEG): container finished" podID="1ce4a648-a851-4adf-85b6-ef84bdb1e89b" containerID="d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4" exitCode=0 Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.119936 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzdv9" event={"ID":"1ce4a648-a851-4adf-85b6-ef84bdb1e89b","Type":"ContainerDied","Data":"d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4"} Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.120033 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzdv9" event={"ID":"1ce4a648-a851-4adf-85b6-ef84bdb1e89b","Type":"ContainerDied","Data":"20a0c25eeb0c35d6f4be0c7bf1808ba937fbbe45ca1b706746c1e67d5e042dd4"} Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.120057 4763 scope.go:117] "RemoveContainer" containerID="d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.119961 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tzdv9" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.159171 4763 scope.go:117] "RemoveContainer" containerID="152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.184950 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzdv9"] Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.192154 4763 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.195864 4763 scope.go:117] "RemoveContainer" containerID="b05283ebb466f0d1437523ad13364d895b7ac73bdbe6f2bc0f96c3de37cbde43" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.198613 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzdv9"] Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.250565 4763 scope.go:117] "RemoveContainer" containerID="d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4" Dec 06 09:45:09 crc kubenswrapper[4763]: E1206 09:45:09.251232 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4\": container with ID starting with d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4 not found: ID does not exist" containerID="d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.251278 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4"} err="failed to get container status \"d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4\": rpc error: code = NotFound desc = could not find container \"d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4\": container with ID starting with d4bc5a007689dec89a70a83010cd3eea0af53f81c2466326fdb16e88cbbaa9b4 not found: ID does not exist" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.251320 4763 scope.go:117] "RemoveContainer" containerID="152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be" Dec 06 09:45:09 crc kubenswrapper[4763]: E1206 09:45:09.251685 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be\": container with ID starting with 152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be not found: ID does not exist" containerID="152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.251723 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be"} err="failed to get container status \"152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be\": rpc error: code = NotFound desc = could not find container \"152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be\": container with ID starting with 152ee2924b56a2d4d05ef49178735881e4fd950f2612c4e010208232cb7804be not found: ID does not exist" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.251744 4763 scope.go:117] "RemoveContainer" 
containerID="b05283ebb466f0d1437523ad13364d895b7ac73bdbe6f2bc0f96c3de37cbde43" Dec 06 09:45:09 crc kubenswrapper[4763]: E1206 09:45:09.252296 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b05283ebb466f0d1437523ad13364d895b7ac73bdbe6f2bc0f96c3de37cbde43\": container with ID starting with b05283ebb466f0d1437523ad13364d895b7ac73bdbe6f2bc0f96c3de37cbde43 not found: ID does not exist" containerID="b05283ebb466f0d1437523ad13364d895b7ac73bdbe6f2bc0f96c3de37cbde43" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.252356 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b05283ebb466f0d1437523ad13364d895b7ac73bdbe6f2bc0f96c3de37cbde43"} err="failed to get container status \"b05283ebb466f0d1437523ad13364d895b7ac73bdbe6f2bc0f96c3de37cbde43\": rpc error: code = NotFound desc = could not find container \"b05283ebb466f0d1437523ad13364d895b7ac73bdbe6f2bc0f96c3de37cbde43\": container with ID starting with b05283ebb466f0d1437523ad13364d895b7ac73bdbe6f2bc0f96c3de37cbde43 not found: ID does not exist" Dec 06 09:45:09 crc kubenswrapper[4763]: I1206 09:45:09.729609 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ce4a648-a851-4adf-85b6-ef84bdb1e89b" path="/var/lib/kubelet/pods/1ce4a648-a851-4adf-85b6-ef84bdb1e89b/volumes" Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.079872 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xz5d8"] Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.138802 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xz5d8" podUID="f54aae5f-0730-4468-b9b3-25db688428d4" containerName="registry-server" containerID="cri-o://eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd" gracePeriod=2 Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.704909 4763 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.837675 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-utilities\") pod \"f54aae5f-0730-4468-b9b3-25db688428d4\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.838556 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-utilities" (OuterVolumeSpecName: "utilities") pod "f54aae5f-0730-4468-b9b3-25db688428d4" (UID: "f54aae5f-0730-4468-b9b3-25db688428d4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.838692 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc8zh\" (UniqueName: \"kubernetes.io/projected/f54aae5f-0730-4468-b9b3-25db688428d4-kube-api-access-jc8zh\") pod \"f54aae5f-0730-4468-b9b3-25db688428d4\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.838768 4763 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-catalog-content\") pod \"f54aae5f-0730-4468-b9b3-25db688428d4\" (UID: \"f54aae5f-0730-4468-b9b3-25db688428d4\") " Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.839601 4763 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-utilities\") on node \"crc\" DevicePath \"\"" Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.864743 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f54aae5f-0730-4468-b9b3-25db688428d4-kube-api-access-jc8zh" (OuterVolumeSpecName: "kube-api-access-jc8zh") pod "f54aae5f-0730-4468-b9b3-25db688428d4" (UID: "f54aae5f-0730-4468-b9b3-25db688428d4"). InnerVolumeSpecName "kube-api-access-jc8zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.898647 4763 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f54aae5f-0730-4468-b9b3-25db688428d4" (UID: "f54aae5f-0730-4468-b9b3-25db688428d4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.942100 4763 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc8zh\" (UniqueName: \"kubernetes.io/projected/f54aae5f-0730-4468-b9b3-25db688428d4-kube-api-access-jc8zh\") on node \"crc\" DevicePath \"\"" Dec 06 09:45:11 crc kubenswrapper[4763]: I1206 09:45:11.942142 4763 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54aae5f-0730-4468-b9b3-25db688428d4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.150532 4763 generic.go:334] "Generic (PLEG): container finished" podID="f54aae5f-0730-4468-b9b3-25db688428d4" containerID="eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd" exitCode=0 Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.150575 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xz5d8" event={"ID":"f54aae5f-0730-4468-b9b3-25db688428d4","Type":"ContainerDied","Data":"eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd"} Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.150601 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xz5d8" event={"ID":"f54aae5f-0730-4468-b9b3-25db688428d4","Type":"ContainerDied","Data":"8e696d9155afbae19806c6eed0d281b4ae519d732d219c2f8414748ca98e2750"} Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.150618 4763 scope.go:117] "RemoveContainer" containerID="eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd" Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.150745 4763 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xz5d8" Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.188767 4763 scope.go:117] "RemoveContainer" containerID="589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22" Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.189463 4763 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xz5d8"] Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.202656 4763 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xz5d8"] Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.228205 4763 scope.go:117] "RemoveContainer" containerID="318864a3630f2aaa20944d6fca5b2b63238dc9e68f92e1268725f6df91d0aaa1" Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.263783 4763 scope.go:117] "RemoveContainer" containerID="eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd" Dec 06 09:45:12 crc kubenswrapper[4763]: E1206 09:45:12.264299 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd\": container with ID starting with eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd not found: ID does not exist" containerID="eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd" Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.264345 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd"} err="failed to get container status \"eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd\": rpc error: code = NotFound desc = could not find container \"eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd\": container with ID starting with eda37a7b16549efe06d2e4974eb992e9dbde70c81de8988323ab0e834768d6bd not found: ID does not exist" Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.264373 4763 scope.go:117] "RemoveContainer" containerID="589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22" Dec 06 09:45:12 crc kubenswrapper[4763]: E1206 09:45:12.265067 4763 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22\": container with ID starting with 589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22 not found: ID does not exist" containerID="589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22" Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.265313 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22"} err="failed to get container status \"589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22\": rpc error: code = NotFound desc = could not find container \"589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22\": container with ID starting with 589d98e6648f673b4e344765da1f848ce225c11877a1bbd7c1f0d090750dbc22 not found: ID does not exist" Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.265407 4763 scope.go:117] "RemoveContainer" containerID="318864a3630f2aaa20944d6fca5b2b63238dc9e68f92e1268725f6df91d0aaa1" Dec 06 09:45:12 crc kubenswrapper[4763]: E1206 09:45:12.265819 4763 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"318864a3630f2aaa20944d6fca5b2b63238dc9e68f92e1268725f6df91d0aaa1\": container with ID starting with 318864a3630f2aaa20944d6fca5b2b63238dc9e68f92e1268725f6df91d0aaa1 not found: ID does not exist" containerID="318864a3630f2aaa20944d6fca5b2b63238dc9e68f92e1268725f6df91d0aaa1" Dec 06 09:45:12 crc kubenswrapper[4763]: I1206 09:45:12.265853 4763 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"318864a3630f2aaa20944d6fca5b2b63238dc9e68f92e1268725f6df91d0aaa1"} err="failed to get container status \"318864a3630f2aaa20944d6fca5b2b63238dc9e68f92e1268725f6df91d0aaa1\": rpc error: code = NotFound desc = could not find container \"318864a3630f2aaa20944d6fca5b2b63238dc9e68f92e1268725f6df91d0aaa1\": container with ID starting with 318864a3630f2aaa20944d6fca5b2b63238dc9e68f92e1268725f6df91d0aaa1 not found: ID does not exist" Dec 06 09:45:13 crc kubenswrapper[4763]: I1206 09:45:13.731973 4763 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f54aae5f-0730-4468-b9b3-25db688428d4" path="/var/lib/kubelet/pods/f54aae5f-0730-4468-b9b3-25db688428d4/volumes" Dec 06 09:45:25 crc kubenswrapper[4763]: I1206 09:45:25.413449 4763 scope.go:117] "RemoveContainer" containerID="ccecd7a313719e422608b01311571225d0f7e554677fbb7c1ebfae0bcdacbc0c" Dec 06 09:46:42 crc kubenswrapper[4763]: I1206 09:46:42.537832 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:46:42 crc kubenswrapper[4763]: I1206 09:46:42.538625 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:47:12 crc kubenswrapper[4763]: I1206 09:47:12.536942 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:47:12 crc kubenswrapper[4763]: I1206 09:47:12.537513 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:47:42 crc kubenswrapper[4763]: I1206 09:47:42.537049 4763 patch_prober.go:28] interesting pod/machine-config-daemon-np59r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 06 09:47:42 crc kubenswrapper[4763]: I1206 09:47:42.539060 4763 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 06 09:47:42 crc kubenswrapper[4763]: I1206 09:47:42.539117 4763 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-np59r" Dec 06 09:47:42 crc kubenswrapper[4763]: I1206 09:47:42.540108 4763 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7ef09fa644d45c531ea335b8c2a65adb0688ca2d29c18a70709bbb84b9b533c2"} pod="openshift-machine-config-operator/machine-config-daemon-np59r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 06 09:47:42 crc kubenswrapper[4763]: I1206 09:47:42.540330 4763 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" containerName="machine-config-daemon" containerID="cri-o://7ef09fa644d45c531ea335b8c2a65adb0688ca2d29c18a70709bbb84b9b533c2" gracePeriod=600 Dec 06 09:47:42 crc kubenswrapper[4763]: E1206 09:47:42.665160 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:47:42 crc kubenswrapper[4763]: I1206 09:47:42.998984 4763 generic.go:334] "Generic (PLEG): container finished" podID="75bb7009-deac-407d-901d-035c51914a8a" containerID="7ef09fa644d45c531ea335b8c2a65adb0688ca2d29c18a70709bbb84b9b533c2" exitCode=0 Dec 06 09:47:42 crc kubenswrapper[4763]: I1206 09:47:42.999027 4763 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-np59r" event={"ID":"75bb7009-deac-407d-901d-035c51914a8a","Type":"ContainerDied","Data":"7ef09fa644d45c531ea335b8c2a65adb0688ca2d29c18a70709bbb84b9b533c2"} Dec 06 09:47:42 crc kubenswrapper[4763]: I1206 09:47:42.999056 4763 scope.go:117] "RemoveContainer" containerID="a37e740357b3afa0efe7f382aeab0e49d44f3d51cf8cee5d8cd16ba366adfefc" Dec 06 09:47:42 crc kubenswrapper[4763]: I1206 09:47:42.999831 4763 scope.go:117] "RemoveContainer" containerID="7ef09fa644d45c531ea335b8c2a65adb0688ca2d29c18a70709bbb84b9b533c2" Dec 06 09:47:43 crc kubenswrapper[4763]: E1206 09:47:43.000092 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:47:54 crc kubenswrapper[4763]: I1206 09:47:54.721729 4763 scope.go:117] "RemoveContainer" containerID="7ef09fa644d45c531ea335b8c2a65adb0688ca2d29c18a70709bbb84b9b533c2" Dec 06 09:47:54 crc kubenswrapper[4763]: E1206 09:47:54.724111 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:48:09 crc kubenswrapper[4763]: I1206 09:48:09.719376 4763 scope.go:117] "RemoveContainer" containerID="7ef09fa644d45c531ea335b8c2a65adb0688ca2d29c18a70709bbb84b9b533c2" Dec 06 09:48:09 crc kubenswrapper[4763]: E1206 09:48:09.720317 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:48:23 crc kubenswrapper[4763]: I1206 09:48:23.720170 4763 scope.go:117] "RemoveContainer" containerID="7ef09fa644d45c531ea335b8c2a65adb0688ca2d29c18a70709bbb84b9b533c2" Dec 06 09:48:23 crc kubenswrapper[4763]: E1206 09:48:23.721072 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:48:35 crc kubenswrapper[4763]: I1206 09:48:35.719618 4763 scope.go:117] "RemoveContainer" containerID="7ef09fa644d45c531ea335b8c2a65adb0688ca2d29c18a70709bbb84b9b533c2" Dec 06 09:48:35 crc kubenswrapper[4763]: E1206 09:48:35.720446 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" Dec 06 09:48:49 crc kubenswrapper[4763]: I1206 09:48:49.719475 4763 scope.go:117] "RemoveContainer" containerID="7ef09fa644d45c531ea335b8c2a65adb0688ca2d29c18a70709bbb84b9b533c2" Dec 06 09:48:49 crc kubenswrapper[4763]: E1206 09:48:49.721161 4763 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-np59r_openshift-machine-config-operator(75bb7009-deac-407d-901d-035c51914a8a)\"" pod="openshift-machine-config-operator/machine-config-daemon-np59r" podUID="75bb7009-deac-407d-901d-035c51914a8a" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114776010024447 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114776010017364 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114762127016513 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114762127015463 5ustar corecore